@@ -0,0 +1,5 @@
.pio
.vscode/.browse.c_cpp.db*
.vscode/c_cpp_properties.json
.vscode/launch.json
.vscode/ipch
@@ -0,0 +1,7 @@
{
    // See http://go.microsoft.com/fwlink/?LinkId=827846
    // for the documentation about the extensions.json format
    "recommendations": [
        "platformio.platformio-ide"
    ]
}
@@ -0,0 +1,39 @@

This directory is intended for project header files.

A header file is a file containing C declarations and macro definitions
to be shared between several project source files. You request the use of a
header file in your project source file (C, C++, etc.) located in the `src` folder
by including it with the C preprocessing directive `#include`.

```src/main.c

#include "header.h"

int main (void)
{
  ...
}
```

Including a header file produces the same results as copying the header file
into each source file that needs it. Such copying would be time-consuming
and error-prone. With a header file, the related declarations appear
in only one place. If they need to be changed, they can be changed in one
place, and programs that include the header file will automatically use the
new version when next recompiled. The header file eliminates the labor of
finding and changing all the copies as well as the risk that a failure to
find one copy will result in inconsistencies within a program.

In C, the usual convention is to give header files names that end with `.h`.
It is most portable to use only letters, digits, dashes, and underscores in
header file names, and at most one dot.

Read more about using header files in the official GCC documentation:

* Include Syntax
* Include Operation
* Once-Only Headers
* Computed Includes

https://gcc.gnu.org/onlinedocs/cpp/Header-Files.html
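As a quick illustration of the "Once-Only Headers" topic listed above, a header is usually wrapped in an include guard (or `#pragma once`) so that including it twice does not redefine its contents. This is only a minimal sketch; the `sensor.h` file and function name are hypothetical examples, not part of this project:

```cpp
// sensor.h - hypothetical example header protected by an include guard
#ifndef SENSOR_H
#define SENSOR_H

// Declarations shared between several source files go here
int read_sensor_value(void);

#endif // SENSOR_H
```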
@@ -0,0 +1,46 @@

This directory is intended for project specific (private) libraries.
PlatformIO will compile them to static libraries and link them into the executable file.

The source code of each library should be placed in its own separate directory
("lib/your_library_name/[here are source files]").

For example, see the structure of the following two libraries `Foo` and `Bar`:

|--lib
|  |
|  |--Bar
|  |  |--docs
|  |  |--examples
|  |  |--src
|  |     |- Bar.c
|  |     |- Bar.h
|  |  |- library.json (optional, custom build options, etc) https://docs.platformio.org/page/librarymanager/config.html
|  |
|  |--Foo
|  |  |- Foo.c
|  |  |- Foo.h
|  |
|  |- README --> THIS FILE
|
|- platformio.ini
|--src
   |- main.c

and the contents of `src/main.c`:
```
#include <Foo.h>
#include <Bar.h>

int main (void)
{
  ...
}

```

The PlatformIO Library Dependency Finder will automatically find dependent
libraries by scanning project source files.

More information about the PlatformIO Library Dependency Finder:
- https://docs.platformio.org/page/librarymanager/ldf.html
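To make the layout above concrete, a private library is just a header plus its implementation. The following is only a hedged sketch of what the hypothetical `Foo` library's files could contain; the `foo_add` function is an invented example, not part of this project:

```cpp
// lib/Foo/Foo.h - declaration shared with the rest of the project
#pragma once

int foo_add(int a, int b);

// lib/Foo/Foo.c (or Foo.cpp) - implementation compiled into the static library
#include "Foo.h"

int foo_add(int a, int b)
{
    return a + b;
}
```

When `src/main.c` includes `<Foo.h>`, the Library Dependency Finder sees the include and adds `lib/Foo` to the build automatically.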
@@ -0,0 +1,26 @@
; PlatformIO Project Configuration File
;
;   Build options: build flags, source filter
;   Upload options: custom upload port, speed and extra flags
;   Library options: dependencies, extra library storages
;   Advanced options: extra scripting
;
; Please visit documentation for the other options and examples
; https://docs.platformio.org/page/projectconf.html

[env:seeed_wio_terminal]
platform = atmelsam
board = seeed_wio_terminal
framework = arduino
lib_deps =
    seeed-studio/Seeed Arduino rpcWiFi
    seeed-studio/Seeed Arduino FS
    seeed-studio/Seeed Arduino SFUD
    seeed-studio/Seeed Arduino rpcUnified
    seeed-studio/Seeed_Arduino_mbedtls
    seeed-studio/Seeed Arduino RTC
    bblanchon/ArduinoJson @ 6.17.3
build_flags =
    -w
    -DARDUCAM_SHIELD_V2
    -DOV2640_CAM
@@ -0,0 +1,160 @@
#pragma once

#include <ArduCAM.h>
#include <SPI.h>
#include <Wire.h>

class Camera
{
public:
    Camera(int format, int image_size) : _arducam(OV2640, PIN_SPI_SS)
    {
        _format = format;
        _image_size = image_size;
    }

    bool init()
    {
        // Reset the CPLD
        _arducam.write_reg(0x07, 0x80);
        delay(100);

        _arducam.write_reg(0x07, 0x00);
        delay(100);

        // Check if the ArduCAM SPI bus is OK
        _arducam.write_reg(ARDUCHIP_TEST1, 0x55);
        if (_arducam.read_reg(ARDUCHIP_TEST1) != 0x55)
        {
            return false;
        }

        // Change MCU mode
        _arducam.set_mode(MCU2LCD_MODE);

        uint8_t vid, pid;

        // Check if the camera module type is OV2640 - fail if the vendor ID is wrong,
        // or the product ID is neither 0x41 nor 0x42
        _arducam.wrSensorReg8_8(0xff, 0x01);
        _arducam.rdSensorReg8_8(OV2640_CHIPID_HIGH, &vid);
        _arducam.rdSensorReg8_8(OV2640_CHIPID_LOW, &pid);
        if ((vid != 0x26) || ((pid != 0x41) && (pid != 0x42)))
        {
            return false;
        }

        _arducam.set_format(_format);
        _arducam.InitCAM();
        _arducam.OV2640_set_JPEG_size(_image_size);
        _arducam.OV2640_set_Light_Mode(Auto);
        _arducam.OV2640_set_Special_effects(Normal);
        delay(1000);

        return true;
    }

    void startCapture()
    {
        _arducam.flush_fifo();
        _arducam.clear_fifo_flag();
        _arducam.start_capture();
    }

    bool captureReady()
    {
        return _arducam.get_bit(ARDUCHIP_TRIG, CAP_DONE_MASK);
    }

    bool readImageToBuffer(byte **buffer, uint32_t &buffer_length)
    {
        if (!captureReady()) return false;

        // Get the image file length
        uint32_t length = _arducam.read_fifo_length();
        buffer_length = length;

        if (length >= MAX_FIFO_SIZE)
        {
            return false;
        }
        if (length == 0)
        {
            return false;
        }

        // Create the buffer
        byte *buf = new byte[length];

        uint8_t temp = 0, temp_last = 0;
        int i = 0;
        uint32_t buffer_pos = 0;
        bool is_header = false;

        _arducam.CS_LOW();
        _arducam.set_fifo_burst();

        while (length--)
        {
            temp_last = temp;
            temp = SPI.transfer(0x00);

            // Read JPEG data from the FIFO
            if ((temp == 0xD9) && (temp_last == 0xFF)) // End of image marker (0xFF 0xD9) found
            {
                buf[buffer_pos] = temp;

                buffer_pos++;
                i++;

                _arducam.CS_HIGH();
            }

            if (is_header == true)
            {
                // Write image data to the buffer if the current chunk is not full
                if (i < 256)
                {
                    buf[buffer_pos] = temp;
                    buffer_pos++;
                    i++;
                }
                else
                {
                    // Pause the burst read between 256-byte chunks, then resume
                    _arducam.CS_HIGH();

                    i = 0;
                    buf[buffer_pos] = temp;

                    buffer_pos++;
                    i++;

                    _arducam.CS_LOW();
                    _arducam.set_fifo_burst();
                }
            }
            else if ((temp == 0xD8) && (temp_last == 0xFF)) // Start of image marker (0xFF 0xD8) found
            {
                is_header = true;

                buf[buffer_pos] = temp_last;
                buffer_pos++;
                i++;

                buf[buffer_pos] = temp;
                buffer_pos++;
                i++;
            }
        }

        _arducam.clear_fifo_flag();

        _arducam.set_format(_format);
        _arducam.InitCAM();
        _arducam.OV2640_set_JPEG_size(_image_size);

        // Return the buffer
        *buffer = buf;

        return true;
    }

private:
    ArduCAM _arducam;
    int _format;
    int _image_size;
};
@@ -0,0 +1,49 @@
#pragma once

#include <string>

using namespace std;

// WiFi credentials
const char *SSID = "<SSID>";
const char *PASSWORD = "<PASSWORD>";

const char *PREDICTION_URL = "<PREDICTION_URL>";
const char *PREDICTION_KEY = "<PREDICTION_KEY>";

// Microsoft Azure DigiCert Global Root G2 global certificate
const char *CERTIFICATE =
    "-----BEGIN CERTIFICATE-----\r\n"
    "MIIF8zCCBNugAwIBAgIQAueRcfuAIek/4tmDg0xQwDANBgkqhkiG9w0BAQwFADBh\r\n"
    "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\r\n"
    "d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH\r\n"
    "MjAeFw0yMDA3MjkxMjMwMDBaFw0yNDA2MjcyMzU5NTlaMFkxCzAJBgNVBAYTAlVT\r\n"
    "MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKjAoBgNVBAMTIU1pY3Jv\r\n"
    "c29mdCBBenVyZSBUTFMgSXNzdWluZyBDQSAwNjCCAiIwDQYJKoZIhvcNAQEBBQAD\r\n"
    "ggIPADCCAgoCggIBALVGARl56bx3KBUSGuPc4H5uoNFkFH4e7pvTCxRi4j/+z+Xb\r\n"
    "wjEz+5CipDOqjx9/jWjskL5dk7PaQkzItidsAAnDCW1leZBOIi68Lff1bjTeZgMY\r\n"
    "iwdRd3Y39b/lcGpiuP2d23W95YHkMMT8IlWosYIX0f4kYb62rphyfnAjYb/4Od99\r\n"
    "ThnhlAxGtfvSbXcBVIKCYfZgqRvV+5lReUnd1aNjRYVzPOoifgSx2fRyy1+pO1Uz\r\n"
    "aMMNnIOE71bVYW0A1hr19w7kOb0KkJXoALTDDj1ukUEDqQuBfBxReL5mXiu1O7WG\r\n"
    "0vltg0VZ/SZzctBsdBlx1BkmWYBW261KZgBivrql5ELTKKd8qgtHcLQA5fl6JB0Q\r\n"
    "gs5XDaWehN86Gps5JW8ArjGtjcWAIP+X8CQaWfaCnuRm6Bk/03PQWhgdi84qwA0s\r\n"
    "sRfFJwHUPTNSnE8EiGVk2frt0u8PG1pwSQsFuNJfcYIHEv1vOzP7uEOuDydsmCjh\r\n"
    "lxuoK2n5/2aVR3BMTu+p4+gl8alXoBycyLmj3J/PUgqD8SL5fTCUegGsdia/Sa60\r\n"
    "N2oV7vQ17wjMN+LXa2rjj/b4ZlZgXVojDmAjDwIRdDUujQu0RVsJqFLMzSIHpp2C\r\n"
    "Zp7mIoLrySay2YYBu7SiNwL95X6He2kS8eefBBHjzwW/9FxGqry57i71c2cDAgMB\r\n"
    "AAGjggGtMIIBqTAdBgNVHQ4EFgQU1cFnOsKjnfR3UltZEjgp5lVou6UwHwYDVR0j\r\n"
    "BBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYXjzkwDgYDVR0PAQH/BAQDAgGGMB0GA1Ud\r\n"
    "JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMHYG\r\n"
    "CCsGAQUFBwEBBGowaDAkBggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQu\r\n"
    "Y29tMEAGCCsGAQUFBzAChjRodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGln\r\n"
    "aUNlcnRHbG9iYWxSb290RzIuY3J0MHsGA1UdHwR0MHIwN6A1oDOGMWh0dHA6Ly9j\r\n"
    "cmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RHMi5jcmwwN6A1oDOG\r\n"
    "MWh0dHA6Ly9jcmw0LmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RHMi5j\r\n"
    "cmwwHQYDVR0gBBYwFDAIBgZngQwBAgEwCAYGZ4EMAQICMBAGCSsGAQQBgjcVAQQD\r\n"
    "AgEAMA0GCSqGSIb3DQEBDAUAA4IBAQB2oWc93fB8esci/8esixj++N22meiGDjgF\r\n"
    "+rA2LUK5IOQOgcUSTGKSqF9lYfAxPjrqPjDCUPHCURv+26ad5P/BYtXtbmtxJWu+\r\n"
    "cS5BhMDPPeG3oPZwXRHBJFAkY4O4AF7RIAAUW6EzDflUoDHKv83zOiPfYGcpHc9s\r\n"
    "kxAInCedk7QSgXvMARjjOqdakor21DTmNIUotxo8kHv5hwRlGhBJwps6fEVi1Bt0\r\n"
    "trpM/3wYxlr473WSPUFZPgP1j519kLpWOJ8z09wxay+Br29irPcBYv0GMXlHqThy\r\n"
    "8y4m/HyTQeI2IMvMrQnwqPpY+rLIXyviI2vLoI+4xKE4Rn38ZZ8m\r\n"
    "-----END CERTIFICATE-----\r\n";
@@ -0,0 +1,125 @@
#include <Arduino.h>
#include <ArduinoJson.h>
#include <HTTPClient.h>
#include <rpcWiFi.h>
#include "SD/Seeed_SD.h"
#include <Seeed_FS.h>
#include <SPI.h>
#include <WiFiClientSecure.h>

#include "config.h"
#include "camera.h"

Camera camera = Camera(JPEG, OV2640_640x480);

WiFiClientSecure client;

void setupCamera()
{
    pinMode(PIN_SPI_SS, OUTPUT);
    digitalWrite(PIN_SPI_SS, HIGH);

    Wire.begin();
    SPI.begin();

    if (!camera.init())
    {
        Serial.println("Error setting up the camera!");
    }
}

void connectWiFi()
{
    while (WiFi.status() != WL_CONNECTED)
    {
        Serial.println("Connecting to WiFi..");
        WiFi.begin(SSID, PASSWORD);
        delay(500);
    }

    client.setCACert(CERTIFICATE);
    Serial.println("Connected!");
}

void setup()
{
    Serial.begin(9600);

    while (!Serial)
        ; // Wait for Serial to be ready

    delay(1000);

    connectWiFi();

    setupCamera();

    pinMode(WIO_KEY_C, INPUT_PULLUP);
}

void classifyImage(byte *buffer, uint32_t length)
{
    HTTPClient httpClient;
    httpClient.begin(client, PREDICTION_URL);
    httpClient.addHeader("Content-Type", "application/octet-stream");
    httpClient.addHeader("Prediction-Key", PREDICTION_KEY);

    int httpResponseCode = httpClient.POST(buffer, length);

    if (httpResponseCode == 200)
    {
        String result = httpClient.getString();

        DynamicJsonDocument doc(1024);
        deserializeJson(doc, result.c_str());

        JsonObject obj = doc.as<JsonObject>();
        JsonArray predictions = obj["predictions"].as<JsonArray>();

        for (JsonVariant prediction : predictions)
        {
            String tag = prediction["tagName"].as<String>();
            float probability = prediction["probability"].as<float>();

            char buff[32];
            sprintf(buff, "%s:\t%.2f%%", tag.c_str(), probability * 100.0);
            Serial.println(buff);
        }
    }

    httpClient.end();
}

void buttonPressed()
{
    camera.startCapture();

    while (!camera.captureReady())
        delay(100);

    Serial.println("Image captured");

    byte *buffer;
    uint32_t length;

    if (camera.readImageToBuffer(&buffer, length))
    {
        Serial.print("Image read to buffer with length ");
        Serial.println(length);

        classifyImage(buffer, length);

        // Free the image buffer that was allocated with new[] in readImageToBuffer
        delete[] buffer;
    }
}

void loop()
{
    if (digitalRead(WIO_KEY_C) == LOW)
    {
        buttonPressed();
        delay(2000);
    }

    delay(200);
}
@@ -0,0 +1,11 @@

This directory is intended for PlatformIO Unit Testing and project tests.

Unit Testing is a software testing method by which individual units of
source code, sets of one or more MCU program modules together with associated
control data, usage procedures, and operating procedures, are tested to
determine whether they are fit for use. Unit testing finds problems early
in the development cycle.

More information about PlatformIO Unit Testing:
- https://docs.platformio.org/page/plus/unit-testing.html
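As an illustration, PlatformIO unit tests are typically written with the bundled Unity framework. The following is only a minimal sketch of what a test file under this directory might look like; the file name, test name, and assertion are hypothetical examples:

```cpp
// test/test_main.cpp - hypothetical example test using the Unity framework
#include <Arduino.h>
#include <unity.h>

void test_addition()
{
    // A trivial assertion just to show the Unity macros
    TEST_ASSERT_EQUAL(4, 2 + 2);
}

void setup()
{
    delay(2000); // Give the board time to settle before reporting results

    UNITY_BEGIN();
    RUN_TEST(test_addition);
    UNITY_END();
}

void loop()
{
}
```

Tests like this are run on the device with `pio test` (optionally `-e seeed_wio_terminal` to target a specific environment).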
@@ -1,3 +1,215 @@
# Classify an image - Wio Terminal

In this part of the lesson, you will send the image captured by the camera to the Custom Vision service to classify it.

## Classify an image

The Custom Vision service has a REST API you can call from the Wio Terminal to classify images. This REST API is accessed over an HTTPS connection - a secure HTTP connection.

When interacting with HTTPS endpoints, the client code needs to request the public key certificate from the server being accessed, and use that to encrypt the traffic it sends. Your web browser does this automatically, but microcontrollers do not. You will need to request this certificate manually and use it to create a secure connection to the REST API. These certificates don't change, so once you have a certificate, it can be hard-coded in your application.

These certificates contain public keys, and don't need to be kept secure. You can use them in your source code and share them publicly in places like GitHub.

### Task - set up an SSL client

1. Open the `fruit-quality-detector` app project if it's not already open

1. Open the `config.h` header file, and add the following:

    ```cpp
    const char *CERTIFICATE =
        "-----BEGIN CERTIFICATE-----\r\n"
        "MIIF8zCCBNugAwIBAgIQAueRcfuAIek/4tmDg0xQwDANBgkqhkiG9w0BAQwFADBh\r\n"
        "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3\r\n"
        "d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH\r\n"
        "MjAeFw0yMDA3MjkxMjMwMDBaFw0yNDA2MjcyMzU5NTlaMFkxCzAJBgNVBAYTAlVT\r\n"
        "MR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKjAoBgNVBAMTIU1pY3Jv\r\n"
        "c29mdCBBenVyZSBUTFMgSXNzdWluZyBDQSAwNjCCAiIwDQYJKoZIhvcNAQEBBQAD\r\n"
        "ggIPADCCAgoCggIBALVGARl56bx3KBUSGuPc4H5uoNFkFH4e7pvTCxRi4j/+z+Xb\r\n"
        "wjEz+5CipDOqjx9/jWjskL5dk7PaQkzItidsAAnDCW1leZBOIi68Lff1bjTeZgMY\r\n"
        "iwdRd3Y39b/lcGpiuP2d23W95YHkMMT8IlWosYIX0f4kYb62rphyfnAjYb/4Od99\r\n"
        "ThnhlAxGtfvSbXcBVIKCYfZgqRvV+5lReUnd1aNjRYVzPOoifgSx2fRyy1+pO1Uz\r\n"
        "aMMNnIOE71bVYW0A1hr19w7kOb0KkJXoALTDDj1ukUEDqQuBfBxReL5mXiu1O7WG\r\n"
        "0vltg0VZ/SZzctBsdBlx1BkmWYBW261KZgBivrql5ELTKKd8qgtHcLQA5fl6JB0Q\r\n"
        "gs5XDaWehN86Gps5JW8ArjGtjcWAIP+X8CQaWfaCnuRm6Bk/03PQWhgdi84qwA0s\r\n"
        "sRfFJwHUPTNSnE8EiGVk2frt0u8PG1pwSQsFuNJfcYIHEv1vOzP7uEOuDydsmCjh\r\n"
        "lxuoK2n5/2aVR3BMTu+p4+gl8alXoBycyLmj3J/PUgqD8SL5fTCUegGsdia/Sa60\r\n"
        "N2oV7vQ17wjMN+LXa2rjj/b4ZlZgXVojDmAjDwIRdDUujQu0RVsJqFLMzSIHpp2C\r\n"
        "Zp7mIoLrySay2YYBu7SiNwL95X6He2kS8eefBBHjzwW/9FxGqry57i71c2cDAgMB\r\n"
        "AAGjggGtMIIBqTAdBgNVHQ4EFgQU1cFnOsKjnfR3UltZEjgp5lVou6UwHwYDVR0j\r\n"
        "BBgwFoAUTiJUIBiV5uNu5g/6+rkS7QYXjzkwDgYDVR0PAQH/BAQDAgGGMB0GA1Ud\r\n"
        "JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjASBgNVHRMBAf8ECDAGAQH/AgEAMHYG\r\n"
        "CCsGAQUFBwEBBGowaDAkBggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQu\r\n"
        "Y29tMEAGCCsGAQUFBzAChjRodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGln\r\n"
        "aUNlcnRHbG9iYWxSb290RzIuY3J0MHsGA1UdHwR0MHIwN6A1oDOGMWh0dHA6Ly9j\r\n"
        "cmwzLmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RHMi5jcmwwN6A1oDOG\r\n"
        "MWh0dHA6Ly9jcmw0LmRpZ2ljZXJ0LmNvbS9EaWdpQ2VydEdsb2JhbFJvb3RHMi5j\r\n"
        "cmwwHQYDVR0gBBYwFDAIBgZngQwBAgEwCAYGZ4EMAQICMBAGCSsGAQQBgjcVAQQD\r\n"
        "AgEAMA0GCSqGSIb3DQEBDAUAA4IBAQB2oWc93fB8esci/8esixj++N22meiGDjgF\r\n"
        "+rA2LUK5IOQOgcUSTGKSqF9lYfAxPjrqPjDCUPHCURv+26ad5P/BYtXtbmtxJWu+\r\n"
        "cS5BhMDPPeG3oPZwXRHBJFAkY4O4AF7RIAAUW6EzDflUoDHKv83zOiPfYGcpHc9s\r\n"
        "kxAInCedk7QSgXvMARjjOqdakor21DTmNIUotxo8kHv5hwRlGhBJwps6fEVi1Bt0\r\n"
        "trpM/3wYxlr473WSPUFZPgP1j519kLpWOJ8z09wxay+Br29irPcBYv0GMXlHqThy\r\n"
        "8y4m/HyTQeI2IMvMrQnwqPpY+rLIXyviI2vLoI+4xKE4Rn38ZZ8m\r\n"
        "-----END CERTIFICATE-----\r\n";
    ```

    This is the *Microsoft Azure DigiCert Global Root G2 certificate* - it's one of the certificates used by many Azure services globally.

    > 💁 To see that this is the certificate to use, run the following command on macOS or Linux. If you are using Windows, you can run this command using the [Windows Subsystem for Linux (WSL)](https://docs.microsoft.com/windows/wsl/?WT.mc_id=academic-17441-jabenn):
    >
    > ```sh
    > openssl s_client -showcerts -verify 5 -connect api.cognitive.microsoft.com:443
    > ```
    >
    > The output will list the DigiCert Global Root G2 certificate.

1. Open `main.cpp` and add the following include directive:

    ```cpp
    #include <WiFiClientSecure.h>
    ```

1. Below the include directives, declare an instance of `WiFiClientSecure`:

    ```cpp
    WiFiClientSecure client;
    ```

    This class contains code to communicate with web endpoints over HTTPS.

1. In the `connectWiFi` method, set the `WiFiClientSecure` instance to use the DigiCert Global Root G2 certificate:

    ```cpp
    client.setCACert(CERTIFICATE);
    ```

### Task - classify an image

1. Add the following as an additional line to the `lib_deps` list in the `platformio.ini` file:

    ```ini
    bblanchon/ArduinoJson @ 6.17.3
    ```

    This imports [ArduinoJson](https://arduinojson.org), a JSON library for Arduino, which will be used to decode the JSON response from the REST API.
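    To get a feel for how ArduinoJson is used before writing the full function, here is a minimal, self-contained sketch of decoding a small JSON string and reading its fields. The JSON text, field names, and `decodeExample` function here are illustrative examples only, not the actual Custom Vision response:

    ```cpp
    #include <ArduinoJson.h>

    void decodeExample()
    {
        // A tiny, hypothetical JSON document
        const char *json = "{\"tagName\":\"ripe\",\"probability\":0.56}";

        // Allocate a document large enough for the JSON being parsed
        DynamicJsonDocument doc(256);
        deserializeJson(doc, json);

        // Read individual fields by name
        String tag = doc["tagName"].as<String>();
        float probability = doc["probability"].as<float>();

        Serial.print(tag);
        Serial.print(": ");
        Serial.println(probability);
    }
    ```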
1. In `config.h`, add constants for the prediction URL and key from the Custom Vision service:

    ```cpp
    const char *PREDICTION_URL = "<PREDICTION_URL>";
    const char *PREDICTION_KEY = "<PREDICTION_KEY>";
    ```

    Replace `<PREDICTION_URL>` with the prediction URL from Custom Vision. Replace `<PREDICTION_KEY>` with the prediction key.

1. In `main.cpp`, add an include directive for the ArduinoJson library:

    ```cpp
    #include <ArduinoJson.h>
    ```

1. Add the following function to `main.cpp`, above the `buttonPressed` function.

    ```cpp
    void classifyImage(byte *buffer, uint32_t length)
    {
        HTTPClient httpClient;
        httpClient.begin(client, PREDICTION_URL);
        httpClient.addHeader("Content-Type", "application/octet-stream");
        httpClient.addHeader("Prediction-Key", PREDICTION_KEY);

        int httpResponseCode = httpClient.POST(buffer, length);

        if (httpResponseCode == 200)
        {
            String result = httpClient.getString();

            DynamicJsonDocument doc(1024);
            deserializeJson(doc, result.c_str());

            JsonObject obj = doc.as<JsonObject>();
            JsonArray predictions = obj["predictions"].as<JsonArray>();

            for (JsonVariant prediction : predictions)
            {
                String tag = prediction["tagName"].as<String>();
                float probability = prediction["probability"].as<float>();

                char buff[32];
                sprintf(buff, "%s:\t%.2f%%", tag.c_str(), probability * 100.0);
                Serial.println(buff);
            }
        }

        httpClient.end();
    }
    ```

    This code starts by declaring an `HTTPClient` - a class that contains methods to interact with REST APIs. It then connects the client to the prediction URL using the `WiFiClientSecure` instance that was set up with the Azure public key.

    Once connected, it sends headers - information about the upcoming request that will be made against the REST API. The `Content-Type` header indicates the API call will send raw binary data, and the `Prediction-Key` header passes the Custom Vision prediction key.

    Next, a POST request is made using the HTTP client, uploading a byte array. This will contain the JPEG image captured from the camera when this function is called.

    > 💁 POST requests are meant for sending data and getting a response. There are other request types, such as GET requests, that retrieve data. GET requests are used by your web browser to load web pages.

    The POST request returns a response status code. These are well-defined values, with 200 meaning **OK** - the POST request was successful.

    > 💁 You can see all the response status codes in the [List of HTTP status codes page on Wikipedia](https://wikipedia.org/wiki/List_of_HTTP_status_codes)
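    The function above silently ignores anything other than a 200. While debugging, you may want to surface failures as well; a minimal sketch, reusing the `httpClient`, `httpResponseCode`, `buffer`, and `length` names from the function above, could add an `else` branch like this:

    ```cpp
    int httpResponseCode = httpClient.POST(buffer, length);

    if (httpResponseCode == 200)
    {
        // ... process the predictions as shown above ...
    }
    else
    {
        // Surface the failure so it is visible in the serial monitor
        Serial.print("Error calling the prediction endpoint: ");
        Serial.println(httpResponseCode);
    }
    ```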
    If a 200 is returned, the result is read from the HTTP client. This is a text response from the REST API with the results of the prediction as a JSON document. The JSON is in the following format:

    ```json
    {
        "id":"45d614d3-7d6f-47e9-8fa2-04f237366a16",
        "project":"135607e5-efac-4855-8afb-c93af3380531",
        "iteration":"04f1c1fa-11ec-4e59-bb23-4c7aca353665",
        "created":"2021-06-10T17:58:58.959Z",
        "predictions":[
            {
                "probability":0.5582016,
                "tagId":"05a432ea-9718-4098-b14f-5f0688149d64",
                "tagName":"ripe"
            },
            {
                "probability":0.44179836,
                "tagId":"bb091037-16e5-418e-a9ea-31c6a2920f17",
                "tagName":"unripe"
            }
        ]
    }
    ```

    The important part here is the `predictions` array. This contains the predictions, with one entry for each tag containing the tag name and the probability. The probabilities returned are floating point numbers from 0-1, with 0 being a 0% chance of matching the tag, and 1 being a 100% chance.

    > 💁 Image classifiers will return the percentages for all tags that have been used. Each tag will have a probability that the image matches that tag.

    This JSON is decoded, and the probabilities for each tag are sent to the serial monitor.
1. In the `buttonPressed` function, either replace the code that saves to the SD card with a call to `classifyImage`, or add it after the image is written, but **before** the buffer is deleted:

    ```cpp
    classifyImage(buffer, length);
    ```

    > 💁 If you replace the code that saves to the SD card, you can clean up your code by removing the `setupSDCard` and `saveToSDCard` functions.

1. Upload and run your code. Point the camera at some fruit and press the C button. You will see the output in the serial monitor:

    ```output
    Connecting to WiFi..
    Connected!
    Image captured
    Image read to buffer with length 8200
    ripe: 56.84%
    unripe: 43.16%
    ```

    You will be able to see the image that was taken, and these values in the **Predictions** tab in Custom Vision.

    

> 💁 You can find this code in the [code-classify/wio-terminal](code-classify/wio-terminal) folder.

😀 Your fruit quality classifier program was a success!
@@ -1,9 +1,18 @@
# Build a fruit quality detector

## Instructions

Build the fruit quality detector!

Take everything you have learned so far and build the prototype fruit quality detector. Trigger image classification based on proximity using an AI model running on the edge, store the results of the classification in storage, and control an LED based on the ripeness of the fruit.

You should be able to piece this together using code you have previously written in all the lessons so far.

## Rubric

| Criteria | Exemplary | Adequate | Needs Improvement |
| -------- | --------- | -------- | ----------------- |
| Configure all the services | Was able to set up an IoT Hub, Azure Functions application and Azure storage | Was able to set up the IoT Hub, but not the Azure Functions app or Azure storage | Was unable to set up any of the IoT services |
| Monitor proximity and send the data to IoT Hub if an object is closer than a pre-defined distance and trigger the camera via a command | Was able to measure distance and send a message to an IoT Hub when an object is close enough, and have a command sent to trigger the camera | Was able to measure proximity and send to IoT Hub, but unable to get a command sent to the camera | Was unable to measure distance and send a message to IoT Hub, or trigger a command |
| Capture an image, classify it and send the results to IoT Hub | Was able to capture an image, classify it using an edge device and send the results to IoT Hub | Was able to classify the image but not using an edge device, or was unable to send the results to IoT Hub | Was unable to classify an image |
| Turn the LED on or off depending on the results of the classification using a command sent to a device | Was able to turn an LED on via a command if the fruit was unripe | Was able to send the command to the device but not control the LED | Was unable to send a command to control the LED |
@@ -1,36 +0,0 @@
import io
import time
from picamera import PiCamera

from azure.cognitiveservices.vision.customvision.prediction import CustomVisionPredictionClient
from msrest.authentication import ApiKeyCredentials

camera = PiCamera()
camera.resolution = (640, 480)
camera.rotation = 0

time.sleep(2)

image = io.BytesIO()
camera.capture(image, 'jpeg')
image.seek(0)

with open('image.jpg', 'wb') as image_file:
    image_file.write(image.read())

prediction_url = '<prediction_url>'
prediction_key = '<prediction key>'

parts = prediction_url.split('/')
endpoint = 'https://' + parts[2]
project_id = parts[6]
iteration_name = parts[9]

prediction_credentials = ApiKeyCredentials(in_headers={"Prediction-key": prediction_key})
predictor = CustomVisionPredictionClient(endpoint, prediction_credentials)

image.seek(0)
results = predictor.classify_image(project_id, iteration_name, image)

for prediction in results.predictions:
    print(f'{prediction.tag_name}:\t{prediction.probability * 100:.2f}%')
@@ -1,36 +0,0 @@
from counterfit_connection import CounterFitConnection
CounterFitConnection.init('127.0.0.1', 5000)

import io
from counterfit_shims_picamera import PiCamera

from azure.cognitiveservices.vision.customvision.prediction import CustomVisionPredictionClient
from msrest.authentication import ApiKeyCredentials

camera = PiCamera()
camera.resolution = (640, 480)
camera.rotation = 0

image = io.BytesIO()
camera.capture(image, 'jpeg')
image.seek(0)

with open('image.jpg', 'wb') as image_file:
    image_file.write(image.read())

prediction_url = '<prediction_url>'
prediction_key = '<prediction key>'

parts = prediction_url.split('/')
endpoint = 'https://' + parts[2]
project_id = parts[6]
iteration_name = parts[9]

prediction_credentials = ApiKeyCredentials(in_headers={"Prediction-key": prediction_key})
predictor = CustomVisionPredictionClient(endpoint, prediction_credentials)

image.seek(0)
results = predictor.classify_image(project_id, iteration_name, image)

for prediction in results.predictions:
    print(f'{prediction.tag_name}:\t{prediction.probability * 100:.2f}%')