An Information Terminal for Living with an Avatar
This project is an "information terminal for living with an avatar."
It uses a SONY Spresense to announce the time and the weather with images and voice.
It also performs person detection in real time, and the avatar greets you.
Features
Functions triggered by camera image recognition (real-time detection with TensorFlow Lite Micro)
・Recognizes a person's face in real time.
Trigger conditions
・When a person becomes detected after not being detected
・When a previously detected person is no longer detected
Action
・Shows an avatar image on the LCD and plays synthesized speech through the speaker (a simplified sketch of this trigger logic follows this list).
Functions triggered by button presses (Wi-Fi via the connected ESP32)
Action 1
・Fetches weather information from the internet
・Fetches temperatures (daily high/low) from the internet
・Fetches the current time from the internet
Action 2
・Shows text on the LCD and plays synthesized speech through the speaker.
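The greeting behavior comes down to edge detection on a person-presence flag: the avatar only reacts when the detection result changes, not on every camera frame. The following is a minimal sketch of that trigger logic in isolation; `onDetectionResult()` is a hypothetical hook, and in the full program further below the same logic lives inside the camera callback.

```cpp
// Minimal sketch of the greeting trigger: act only on presence transitions.
// onDetectionResult() is a hypothetical hook; in the full sketch the same
// logic lives inside the camera callback CamCB().
bool g_isPerson = false;

void onDetectionResult(bool personSeen) {
  if (personSeen && !g_isPerson) {
    g_isPerson = true;
    Serial2.println("hello");   // person just appeared -> ask the ESP32 to greet
  } else if (!personSeen && g_isPerson) {
    g_isPerson = false;
    Serial2.println("bye");     // person just left -> ask the ESP32 to say goodbye
  }
  // no state change -> stay silent
}
```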
Parts
- The parts used in this build are listed below.
Item | Price (JPY) |
---|---|
SONY SPRESENSE main board | 6,050 |
SONY SPRESENSE extension board | 3,850 |
SONY SPRESENSE camera module | 3,850 |
ESP32 DevKitC | 1,230 |
ad keyboard simulate five key | 130 |
Speaker | 180 |
Case (100-yen shop) | 110 |
Total | 15,400 |
Wiring
Wire the parts as shown below.
The Spresense by itself has no way to connect to the internet.
A separate ESP32 therefore provides Wi-Fi, and the two boards cooperate over a serial link so that the Spresense can use the network.
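The two boards talk over a plain UART link: the Spresense sends a one-word command and the ESP32 answers with a single comma-separated line (this is the protocol implemented by the two sketches later in this article). A minimal sketch of the Spresense side, assuming the same 115200 bps wiring shown below:

```cpp
// Minimal sketch of the Spresense side of the UART link to the ESP32.
// The full program sends one-word commands ("date", "weather", "hello", ...)
// and parses the comma-separated reply; this only shows the round trip.
void setup() {
  Serial.begin(115200);
  Serial2.begin(115200, SERIAL_8N1);   // D00/D01 wired to the ESP32
}

void loop() {
  Serial2.println("date");             // request the current date and time
  delay(2000);                         // give the ESP32 time to reply
  if (Serial2.available() > 0) {
    String reply = Serial2.readString();  // e.g. "2022/9/6,22:18"
    Serial.println(reply);
  }
  delay(5000);
}
```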
Spresense
ILI9341
SPRESENSE | ILI9341 |
---|---|
AREF | VCC |
GND | GND |
SCK | SCK |
MISO | MISO |
MOSI | MOSI |
CS | CS |
PWM2 | DC |
GPIO | RESET |
3.3V | VCC |
ad keyboard simulate five key
SPRESENSE | ad keyboard simulate five key |
---|---|
A0 | OUT |
GND | GND |
Vout | VCC |
ESP32
Connection
SPRESENSE | ESP32 |
---|---|
D00(RX) | 16(TX) |
D01(TX) | 17(RX) |
Speaker
ESP32 | Speaker |
---|---|
D25 | Red wire |
GND | Black wire |
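The speaker is driven directly from the ESP32's built-in 8-bit DAC on GPIO25 (DAC channel 1), which is what the Wi-Fi sketch later uses to play the VoiceText WAV stream. The following is only a quick wiring test, assuming the speaker is connected exactly as in the table above; it outputs a rough square-wave tone.

```cpp
// Quick speaker test for the wiring above: a rough ~1 kHz square wave
// on the ESP32's 8-bit DAC (GPIO25 = DAC channel 1).
#include <driver/dac.h>

void setup() {
  dac_output_enable(DAC_CHANNEL_1);
}

void loop() {
  dac_output_voltage(DAC_CHANNEL_1, 200);  // high level
  delayMicroseconds(500);
  dac_output_voltage(DAC_CHANNEL_1, 0);    // low level
  delayMicroseconds(500);
}
```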
Libraries used
Board manager
-Spresense tensorflow Board Version 2.4.1
Open Preferences in the Arduino IDE and add the following URL to "Additional Boards Manager URLs".
https://raw.githubusercontent.com/YoshinoTaro/spresense-arduino-tensorflow/main/package_spresense_tensorflow_index.json
PersonDetection
-Reference: person_detect_model_data
Graphics
-Adafruit_ILI9341-spresense
-Adafruit-GFX-Library-spresense
WebAPI
-Weather forecast
-VoiceText speech synthesis
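As a reference, the weather data comes from drk7.jp as JSONP, so the callback wrapper has to be stripped before parsing with ArduinoJson. The standalone ESP32 sketch below fetches and parses the same endpoint used by the full program; the Wi-Fi credentials are placeholders and error handling is omitted for brevity.

```cpp
// Standalone ESP32 example: fetch the drk7.jp weather JSON used below,
// strip the JSONP wrapper, and print today's forecast entry (as JSON)
// for the "西部" area. SSID/password are placeholders.
#include <WiFi.h>
#include <HTTPClient.h>
#include <ArduinoJson.h>

const char* ssid = "XXXXXX";
const char* pass = "XXXXXX";
const char* endpoint = "https://www.drk7.jp/weather/json/23.js";

void setup() {
  Serial.begin(115200);
  WiFi.begin(ssid, pass);
  while (WiFi.status() != WL_CONNECTED) delay(500);

  HTTPClient http;
  http.begin(endpoint);
  if (http.GET() == HTTP_CODE_OK) {
    String body = http.getString();
    // JSONP -> JSON: drop "drk7jpweather.callback(" and the trailing ");"
    body.replace("drk7jpweather.callback(", "");
    body = body.substring(0, body.length() - 2);
    DynamicJsonDocument doc(20000);
    deserializeJson(doc, body);
    String today = doc["pref"]["area"]["西部"]["info"][0];
    Serial.println(today);
  }
  http.end();
}

void loop() {}
```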
Program
The complete programs are shown below.
Please get the source code from GitHub.
Because the Spresense MainCore program is large, change the Memory setting to "1280KB".
Main application (SpresenseCommunication.ino)
```cpp
#include <Camera.h>
#include "tensorflow/lite/micro/all_ops_resolver.h"
#include "tensorflow/lite/micro/micro_error_reporter.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/micro/system_setup.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "person_detect_model.h"
#include "SPI.h"
#include "Adafruit_GFX.h"
#include "Adafruit_ILI9341.h"
#include "avator.h"
#include "avator2.h"
#include "cloud1.h"
#include "cloud2.h"
#include "cloud3.h"
//Button
#define BTN_PIN A0
#define CMD_DATE 1
#define CMD_WEATHER 2
#define CMD_LIGHT 5
#define CMD_SILENT 6
#define CMD_HELLO 7
#define CMD_BYE 8
// BUTTON KEY ID
int push_index = 0;
//TFT
#define TFT_RST 8
#define TFT_DC 9
#define TFT_CS 10
//Display
#define TFT_BACKLIGHT_PIN 7
Adafruit_ILI9341 tft = Adafruit_ILI9341(&SPI, TFT_DC, TFT_CS, TFT_RST);
bool isTftLight = true;
int g_sendCmd = 0;
int g_weather = 0;
String g_day = "2022/-/-";
String g_time = "-:-";
String g_tempMax = "-";
String g_tempMin = "-";
//TF
tflite::ErrorReporter* error_reporter = nullptr;
const tflite::Model* model = nullptr;
tflite::MicroInterpreter* interpreter = nullptr;
TfLiteTensor* input = nullptr;
TfLiteTensor* output = nullptr;
int inference_count = 0;
constexpr int kTensorArenaSize = 100000;
uint8_t tensor_arena[kTensorArenaSize];
// cropping and scaling parameters
const int offset_x = 32;
const int offset_y = 12;
const int width = 320;
const int height = 240;
const int target_w = 96;
const int target_h = 96;
uint16_t disp[width*height];
uint32_t last_mills = 0;
bool g_isPerson = false;
int getButtonKey(){
int index = 0;
int data = analogRead(BTN_PIN);
//Serial.println(data);
if (5 <= data && data <= 70) {
push_index = 1;
} else if (90 <= data && data <= 150) {
push_index = 2;
} else if (300 <= data && data <= 350) {
push_index = 3;
} else if (360 <= data && data <= 500) {
push_index = 4;
} else if (530 <= data && data <= 700) {
push_index = 5;
} else {
if (push_index != 0) {
index = push_index;
push_index = 0;
Serial.print("btn= ");
Serial.println(index);
}
}
return index;
}
//setup_display
void setup_display() {
tft.begin(40000000);
tft.setRotation(3);
}
void disp_image(int weather) {
//avator
if (g_isPerson) {
tft.drawRGBBitmap(320-200, 40, avator2, 200, 200);
} else {
tft.drawRGBBitmap(320-145, 0, avator, 145, 240);
}
yield();
//weather
//sunny
if (weather == 2) {
tft.drawRGBBitmap(2, 240-82, cloud1, 70, 72);
}
//rain
else if (weather == 1) {
tft.drawRGBBitmap(2, 240-82, cloud2, 70, 72);
}
//cloudy
else if (weather == 3) {
tft.drawRGBBitmap(2, 240-82, cloud3, 70, 72);
}
}
void disp_watch(String day, String time){
tft.setTextColor(ILI9341_WHITE);
tft.setCursor(10, 38);
tft.setTextSize(3);
tft.print(day.c_str());
tft.setCursor(10, 72);
tft.setTextSize(4);
tft.print(time.c_str());
yield();
Serial.print("day=");
Serial.println(day);
Serial.print("time=");
Serial.println(time);
}
void disp_temperature(String max, String min){
tft.setTextColor(ILI9341_RED);
tft.setCursor(70, 175);
tft.setTextSize(4);
tft.print(max.c_str());
tft.setTextColor(ILI9341_WHITE);
tft.print("/");
tft.setTextColor(ILI9341_BLUE);
tft.print(min.c_str());
}
void disp_refresh(){
tft.setCursor(0, 0);
tft.fillScreen(ILI9341_BLACK);
yield();
disp_image(g_weather);
yield();
disp_watch(g_day, g_time);
yield();
disp_temperature(g_tempMax, g_tempMin);
}
void disp_light(){
if (isTftLight){
Serial.println("light on");
digitalWrite(TFT_BACKLIGHT_PIN,HIGH);
disp_refresh();
} else {
Serial.println("light off");
digitalWrite(TFT_BACKLIGHT_PIN,LOW);
}
isTftLight = !isTftLight;
}
void commandFunction(int cmd) {
if (g_sendCmd != 0){
return;
}
if (cmd == CMD_DATE){
Serial.println("date");
Serial2.println("date");
g_sendCmd = CMD_DATE;
} else if (cmd == CMD_WEATHER){
Serial.println("weather");
Serial2.println("weather");
g_sendCmd = CMD_WEATHER;
} else if (cmd == CMD_LIGHT){
disp_light();
disp_refresh();
g_sendCmd = 0;
} else if (cmd == CMD_SILENT){
Serial.println("silent");
Serial2.println("silent");
g_sendCmd = CMD_SILENT;
} else if (cmd == CMD_HELLO){
Serial.println("hello");
Serial2.println("hello");
g_sendCmd = CMD_HELLO;
} else if (cmd == CMD_BYE){
Serial.println("bye");
Serial2.println("bye");
g_sendCmd = CMD_BYE;
}
}
// split data at delimiter into dst (at most arraySize fields)
int Split(String data, char delimiter, String *dst, int arraySize){
int index = 0;
int datalength = data.length();
for (int i = 0; i < datalength; i++) {
char tmp = data.charAt(i);
if ( tmp == delimiter ) {
index++;
if ( index > (arraySize - 1))
return -1;
} else {
dst[index] += tmp;
}
}
return index;
}
void DrawDate(String data) {
//2022/9/6,22:18
String cmds[3] = {"", "", "\0"};
Split(data, ',', cmds, 3);
g_day = cmds[0];
g_time = cmds[1];
disp_refresh();
}
void DrawWeather(String data) {
//1,30,25
String cmds[4] = {"", "", "", "\0"};
Split(data, ',', cmds, 4);
g_weather = cmds[0].toInt();
g_tempMax = cmds[1];
g_tempMin = cmds[2];
disp_refresh();
}
void CamCB(CamImage img) {
uint32_t current_mills = millis();
uint32_t duration = current_mills - last_mills;
if (duration < 5000 || g_sendCmd != 0) {
return;
}
Serial.println("start detect");
tft.writeFillRect(20, 0, 20, 20, ILI9341_YELLOW);
if (!img.isAvailable()) {
Serial.println("img is not available");
return;
}
CamImage small;
CamErr err = img.resizeImageByHW(small, 160, 120);
if (!small.isAvailable()) {
Serial.println("small is not available");
return;
}
uint16_t* buf = (uint16_t*)small.getImgBuff();
int n = 0;
for (int y = offset_y; y < offset_y + target_h; ++y) {
for (int x = offset_x; x < offset_x + target_w; ++x) {
// extracting luminance data from YUV422 data
uint16_t value = buf[y*width + x];
uint16_t y_h = (value & 0xf000) >> 8;
uint16_t y_l = (value & 0x00f0) >> 4;
value = (y_h | y_l); /* luminance data */
/* set the grayscale data to the input buffer for TensorFlow */
input->data.f[n++] = (float)(value)/255.0;
}
}
Serial.println("detect");
TfLiteStatus invoke_status = interpreter->Invoke();
if (invoke_status != kTfLiteOk) {
Serial.println("Invoke failed");
return;
}
bool result = false;
int8_t person_score = output->data.uint8[1];
int8_t no_person_score = output->data.uint8[0];
Serial.print("Person = " + String(person_score) + ", ");
Serial.print("No_person = " + String(no_person_score));
if ((person_score > no_person_score) && (person_score > 60)) {
digitalWrite(LED3, HIGH);
result = true;
Serial.println(" : ON");
if (!g_isPerson) {
g_isPerson = true;
tft.writeFillRect(0, 0, 20, 20, ILI9341_RED);
commandFunction(CMD_HELLO);
disp_refresh();
}
} else {
digitalWrite(LED3, LOW);
Serial.println(" : OFF");
if (g_isPerson) {
g_isPerson = false;
tft.writeFillRect(0, 0, 20, 20, ILI9341_BLUE);
commandFunction(CMD_BYE);
disp_refresh();
}
}
tft.writeFillRect(20, 0, 20, 20, ILI9341_BLACK);
last_mills = millis();
}
void setup() {
Serial.begin(115200);
Serial2.begin(115200, SERIAL_8N1);
// Button
pinMode(BTN_PIN, INPUT);
//pinMode(TFT_BACKLIGHT_PIN,OUTPUT);
// Display
Serial.println("setup_display");
setup_display();
disp_light();
// TF
Serial.println("InitializeTarget");
tflite::InitializeTarget();
memset(tensor_arena, 0, kTensorArenaSize*sizeof(uint8_t));
// Set up logging.
static tflite::MicroErrorReporter micro_error_reporter;
error_reporter = &micro_error_reporter;
// Map the model into a usable data structure.
Serial.println("GetModel");
model = tflite::GetModel(model_tflite);
if (model->version() != TFLITE_SCHEMA_VERSION) {
Serial.println("Model provided is schema version "
+ String(model->version()) + " not equal "
+ "to supported version "
+ String(TFLITE_SCHEMA_VERSION));
return;
} else {
Serial.println("Model version: " + String(model->version()));
}
// This pulls in all the operation implementations we need.
static tflite::AllOpsResolver resolver;
// Build an interpreter to run the model with.
static tflite::MicroInterpreter static_interpreter(
model, resolver, tensor_arena, kTensorArenaSize, error_reporter);
interpreter = &static_interpreter;
// Allocate memory from the tensor_arena for the model's tensors.
TfLiteStatus allocate_status = interpreter->AllocateTensors();
if (allocate_status != kTfLiteOk) {
Serial.println("AllocateTensors() failed");
return;
} else {
Serial.println("AllocateTensor() Success");
}
size_t used_size = interpreter->arena_used_bytes();
Serial.println("Arena used bytes: " + String(used_size));
input = interpreter->input(0);
output = interpreter->output(0);
Serial.println("Model input:");
Serial.println("dims->size: " + String(input->dims->size));
for (int n = 0; n < input->dims->size; ++n) {
Serial.println("dims->data[" + String(n) + "]: " + String(input->dims->data[n]));
}
Serial.println("Model output:");
Serial.println("dims->size: " + String(output->dims->size));
for (int n = 0; n < output->dims->size; ++n) {
Serial.println("dims->data[" + String(n) + "]: " + String(output->dims->data[n]));
}
Serial.println("Completed tensorflow setup");
digitalWrite(LED0, HIGH);
Serial.println("theCamera.begin");
CamErr err = theCamera.begin(1, CAM_VIDEO_FPS_5, width, height, CAM_IMAGE_PIX_FMT_YUV422);
if (err != CAM_ERR_SUCCESS) {
Serial.println("camera begin err: " + String(err));
return;
}
err = theCamera.startStreaming(true, CamCB);
if (err != CAM_ERR_SUCCESS) {
Serial.println("start streaming err: " + String(err));
return;
}
Serial.println("setup");
}
void loop() {
int key = getButtonKey();
commandFunction(key);
if(Serial2.available() >0) {
String data = "";
data = Serial2.readString();
Serial.print("read = ");
Serial.print(data);
if (g_sendCmd == CMD_DATE || g_sendCmd == CMD_SILENT) {
DrawDate(data);
} else if (g_sendCmd == CMD_WEATHER) {
DrawWeather(data);
}
g_sendCmd = 0;
}
delay(200);
}
```
Wi-Fi sub-application (ESP32SpeakCommunictoin.ino)
```cpp
#include <WiFi.h>
#include <time.h>
#include <ArduinoJson.h>
//Speaker
#include <HTTPClient.h>
#include <base64.h> // for http basic auth
#include <driver/dac.h> // Audio dac
#define JST 3600* 9
// Your WiFi credentials.
const char* ssid = "XXXXXX";
const char* pass = "XXXXXX";
String retData;
//-----------------------------------------------------------------------------------------------
// Weather example: fetch the forecast for the western part of Aichi Prefecture
const char* endpoint = "https://www.drk7.jp/weather/json/23.js";
const char* region = "西部";
DynamicJsonDocument weatherInfo(20000);
//-----------------------------------------------------------------------------------------------
// VoiceText Web API
uint16_t data16; // wav data 16bit(2 bytes)
uint8_t left; // Audio dac voltage
// You should get apikey
// visit https://cloud.voicetext.jp/webapi
const String tts_url = "https://api.voicetext.jp/v1/tts";
const String tts_user = "XXXXX SET YOUR ID";
const String tts_pass = ""; // passwd is blank
uint16_t delayus = 60; // depends on the sampling rate
uint8_t wavHeadersize = 44; // depends on the wav format
String tts_parms ="&speaker=show&volume=200&speed=120"; // he has natural(16kHz) wav voice
// play 16bit wav data
void playWav16(uint8_t * buffPlay, int len) {
for( int i=0 ; i<len; i+=sizeof(data16)) {
memcpy(&data16, (char*)buffPlay + i, sizeof(data16));
left = ((uint16_t) data16 + 32767) >> 8; // convert 16bit to 8bit
dac_output_voltage(DAC_CHANNEL_1, left);
ets_delay_us(delayus);
}
}
// text to speech
void text2speech(char * text) {
Serial.println("text to speech");
if ((WiFi.status() == WL_CONNECTED))
{ //Check the current connection status
HTTPClient http; // Initialize the client library
size_t size = 0; // available streaming data size
http.begin(tts_url); //Specify the URL
Serial.println("\nStarting connection to tts server...");
//request header for VoiceText Web API
String auth = base64::encode(tts_user + ":" + tts_pass);
http.addHeader("Authorization", "Basic " + auth);
http.addHeader("Content-Type", "application/x-www-form-urlencoded");
String request = String("text=") + URLEncode(text) + tts_parms;
http.addHeader("Content-Length", String(request.length()));
//Make the request
int httpCode = http.POST(request);
if (httpCode > 0) {
// HTTP header has been sent and Server response header has been handled
Serial.printf("[HTTP] POST... code: %d\n", httpCode);
// file found at server
if (httpCode == HTTP_CODE_OK) {
// get length of data (is -1 when Server sends no Content-Length header)
int len = http.getSize();
Serial.printf("length of data: %d\n", len);
// create buffer for read
uint8_t buff[128] = { 0 };
int buffPoint = 0;
// get tcp stream
WiFiClient * stream = http.getStreamPtr();
// read wav header from server
while(size < wavHeadersize && http.connected() && (len > 0 || len == -1)) {
// get available data size
size = stream->available();
}
if (size >= wavHeadersize) {
int c = stream->readBytes(buff, wavHeadersize);
if (strncmp((char*)buff + wavHeadersize -8, "data", 4)) {
Serial.printf("Error: wav file\n");
return;
}
if (len >= wavHeadersize )
len -=wavHeadersize;
} else {
Serial.printf("Error: wav file\n");
return;
}
Serial.printf("wav header confirmed\n");
// read streaming data from server
while (http.connected() && (len > 0 || len == -1)) {
// get available data size
size = stream->available();
if (size > 0 ) {
int buffLeft = sizeof(buff)-buffPoint;
int c = stream->readBytes(buff+buffPoint, ((size > buffLeft) ? buffLeft : size ));
//Serial.printf("read stream size: %d\n",c);
buffPoint += c;
if (len >=0)
len -= c;
if (buffPoint >= sizeof(buff)) {
playWav16(buff, buffPoint);
buff[0] = buff[buffPoint-1];
buffPoint = buffPoint % sizeof(data16);
}
}
}
if (buffPoint > sizeof(data16)) {
playWav16(buff, buffPoint);
}
Serial.printf("len: %d buffPoint: %d\n",len,buffPoint);
}
Serial.println("finish play");
} else {
Serial.printf("[HTTP] POST... failed, error: %s\n", http.errorToString(httpCode).c_str());
}
http.end(); //Free resources
} else {
Serial.println("Error in WiFi connection");
}
dac_output_voltage(DAC_CHANNEL_1, 0);
}
// Date
String GetDate() {
time_t t;
struct tm *tm;
t = time(NULL);
tm = localtime(&t);
String date = String(tm->tm_year+1900);
retData = date;
date += "年";
retData += "/";
date += String(tm->tm_mon+1);
retData += String(tm->tm_mon+1);
date += "月";
retData += "/";
date += String(tm->tm_mday);
retData += String(tm->tm_mday);
date += "日";
retData += ",";
date += String(tm->tm_hour);
retData += String(tm->tm_hour);
date += "時";
retData += ":";
date += String(tm->tm_min);
retData += String(tm->tm_min);
date += "分";
date += String(tm->tm_sec);
date += "秒";
return date;
}
// Weather
DynamicJsonDocument getJson() {
DynamicJsonDocument doc(20000);
if ((WiFi.status() == WL_CONNECTED)) {
HTTPClient http;
http.begin(endpoint);
int httpCode = http.GET();
if (httpCode > 0) {
// build the JSON object
String jsonString = createJson(http.getString());
deserializeJson(doc, jsonString);
} else {
Serial.println("Error on HTTP request");
}
http.end(); // release resources
}
return doc;
}
// convert the JSONP response into plain JSON
String createJson(String jsonString) {
jsonString.replace("drk7jpweather.callback(","");
return jsonString.substring(0,jsonString.length()-2);
}
String drawWeather(String infoWeather) {
String data = "今日の天気は、";
DynamicJsonDocument doc(20000);
deserializeJson(doc, infoWeather);
String weather = doc["weather"];
retData = "2,";
if (weather.indexOf("雨") != -1) {
if (weather.indexOf("くもり") != -1) {
data += "雨のち曇り";
} else {
data += "雨";
}
retData = "1,";
} else if (weather.indexOf("晴") != -1) {
if (weather.indexOf("くもり") != -1) {
data += "晴れのち曇り";
} else {
data += "晴れ";
}
retData = "2,";
} else if (weather.indexOf("くもり") != -1) {
data += "曇り";
retData = "3,";
} else if (weather.indexOf("雪") != -1) {
data += "雪";
retData = "4,";
} else {
retData = "5,";
}
String maxTemperature = doc["temperature"]["range"][0]["content"];
String minTemperature = doc["temperature"]["range"][1]["content"];
data += "、最高気温は";
data += maxTemperature;
data += "度、最低気温は";
data += minTemperature;
data += "度です。";
retData += maxTemperature;
retData += ",";
retData += minTemperature;
return data;
}
String GetWeather() {
weatherInfo = getJson();
String today = weatherInfo["pref"]["area"][region]["info"][0];
return drawWeather(today);
}
String URLEncode(const char* msg) {
const char *hex = "0123456789ABCDEF";
String encodedMsg = "";
while (*msg != '\0') {
if ( ('a' <= *msg && *msg <= 'z')
|| ('A' <= *msg && *msg <= 'Z')
|| ('0' <= *msg && *msg <= '9')
|| *msg == '-' || *msg == '_' || *msg == '.' || *msg == '~' ) {
encodedMsg += *msg;
} else {
encodedMsg += '%';
encodedMsg += hex[*msg >> 4];
encodedMsg += hex[*msg & 0xf];
}
msg++;
}
return encodedMsg;
}
//Hello
String GetHello() {
retData = "hello";
return "こんにちは!";
}
//Bye
String GetBye() {
retData = "bye";
return "さようなら!";
}
//setup
void setup() {
Serial.begin(115200);
Serial2.begin(115200, SERIAL_8N1);
Serial.println("setup-start");
// Speaker
dac_output_enable(DAC_CHANNEL_1); // use DAC_CHANNEL_1 (pin 25 fixed)
dac_output_voltage(DAC_CHANNEL_1, 0);
// WiFi setup
WiFi.mode(WIFI_STA); // Disable Access Point
WiFi.begin(ssid, pass);
while (WiFi.status() != WL_CONNECTED) {
delay(1000);
Serial.println("Connecting to WiFi..");
}
Serial.println("Connected to the WiFi network");
Serial.println("configTime");
configTime( JST, 0, "ntp.nict.jp", "ntp.jst.mfeed.ad.jp");
Serial.println("setup-end");
}
//loop
void loop() {
if(Serial2.available() > 0) {
String data = Serial2.readString();
Serial.print("get= ");
Serial.println(data.c_str());
//Command Get
//[date]
if(data.indexOf("date") >= 0){
data = GetDate();
} else if(data.indexOf("silent") >= 0){
GetDate();
data = "";
}
//[weather]
else if(data.indexOf("weather") >= 0) {
data = GetWeather();
}
//[hello]
else if(data.indexOf("hello") >= 0) {
data = GetHello();
}
//[bye]
else if(data.indexOf("bye") >= 0) {
data = GetBye();
}
else {
return;
}
//send
Serial2.println(retData.c_str());
//speak
if (data.length() > 1) {
Serial.println(data.c_str());
text2speech((char *)data.c_str());
}
}
}
```
The images are drawn as RGB565 bitmap data.
Rather than reading files from an SD card, the image data is embedded directly in the program.
The program below converts a bitmap file into such an array; each pixel is packed into 16 bits as 5 bits of red, 6 bits of green and 5 bits of blue (pure red (255, 0, 0) becomes 0xF800, for example).
Bitmap-to-RGB565 converter
```csharp
using System.IO;
using System.Text;
using System.Drawing;
namespace ConsoleApp1{
class Program{
static void Main(string[] args){
WriteData(@"C:\Work\src.bmp", @"C:\Work\src.h");
}
static ushort color565(ushort red, ushort green, ushort blue){
return (ushort)(((red & 0xF8) << 8) | ((green & 0xFC) << 3) | (blue >> 3));
}
static void WriteData(string bmpPath, string writePath){
using (var bitmap = new Bitmap(bmpPath))
using (var writer = new StreamWriter(writePath, false, Encoding.UTF8)){
int w = bitmap.Width, h = bitmap.Height;
var fileName = System.IO.Path.GetFileNameWithoutExtension(bmpPath);
writer.Write("uint16_t ");
writer.Write(fileName);
writer.WriteLine("[]={");
for (int y = 0; y < h; y++) {
for (int x = 0; x < w; x++) {
Color pixel = bitmap.GetPixel(x, y);
// ARGB
var d = color565(pixel.R, pixel.G, pixel.B);
if (x + 1 != w) {
writer.Write(string.Format("0x{0:x},", d));
} else {
writer.Write(string.Format("0x{0:x},\n", d));
}
}
}
writer.Write(@"};");
}
}
}
}
```
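The generated header is then included in the Spresense sketch and drawn with Adafruit_GFX's `drawRGBBitmap()`, just like `avator.h` and the weather icons in the main application. A minimal usage sketch, assuming the converter was run on a 320x240 image named `src.bmp`, producing `src.h` with an array named `src` (both names are hypothetical):

```cpp
// Minimal sketch that draws one converted image full-screen.
// "src.h" and the array name "src" are hypothetical outputs of the converter
// above, run on a 320x240 bitmap called src.bmp.
#include "SPI.h"
#include "Adafruit_GFX.h"
#include "Adafruit_ILI9341.h"
#include "src.h"   // uint16_t src[] = { ... };

#define TFT_RST 8
#define TFT_DC  9
#define TFT_CS  10

Adafruit_ILI9341 tft = Adafruit_ILI9341(&SPI, TFT_DC, TFT_CS, TFT_RST);

void setup() {
  tft.begin(40000000);
  tft.setRotation(3);
  tft.drawRGBBitmap(0, 0, src, 320, 240);  // x, y, RGB565 data, width, height
}

void loop() {}
```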
In closing
- There are still only a few features, so there is plenty of room for improvement.
- Person detection is slow to process, and for a use case like this there is little benefit in keeping it running all the time,
so it may be better to combine it with a motion (PIR) sensor and start the recognition only when the sensor responds, as in the sketch below.
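As one possible improvement along those lines, a PIR motion sensor could gate the camera inference so the TensorFlow Lite model only runs while someone is likely nearby. This is just a rough sketch of the idea and not part of the current build; `PIR_PIN` is a hypothetical free digital pin wired to the sensor's output.

```cpp
// Rough sketch: gate the person detection with a PIR motion sensor.
// Not part of this build; PIR_PIN is a hypothetical free digital pin.
#define PIR_PIN 4

bool g_detectionEnabled = false;

void setup() {
  pinMode(PIR_PIN, INPUT);
}

void loop() {
  // Run the (slow) person detection only while the PIR reports motion.
  g_detectionEnabled = (digitalRead(PIR_PIN) == HIGH);
  delay(100);
}

// In the main sketch, an early "if (!g_detectionEnabled) return;" at the top
// of CamCB() would then skip the TensorFlow Lite inference when idle.
```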
[Future applications]
・Notifying about updates from services such as Twitter or Slack