// Multicam v.2.19
// ESP32 has two cores: APPlication and PROtocol
#define APP_CPU 1
#define PRO_CPU 0
#include "src/OV2640.h"
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
// we should disable bt
//#include <esp_bt.h>
#include <esp_wifi.h>
#include <esp_sleep.h>
#include <driver/rtc_io.h>
//disable brownout problems
#include "soc/soc.h"
#include "soc/rtc_cntl_reg.h"
#define CAMERA_MODEL_AI_THINKER
// These are the GPIO pins for AI_THINKER - find yours if you have a different board
#define PWDN_GPIO_NUM 32
#define RESET_GPIO_NUM -1
#define XCLK_GPIO_NUM 0
#define SIOD_GPIO_NUM 26
#define SIOC_GPIO_NUM 27
#define Y9_GPIO_NUM 35
#define Y8_GPIO_NUM 34
#define Y7_GPIO_NUM 39
#define Y6_GPIO_NUM 36
#define Y5_GPIO_NUM 21
#define Y4_GPIO_NUM 19
#define Y3_GPIO_NUM 18
#define Y2_GPIO_NUM 5
#define VSYNC_GPIO_NUM 25
#define HREF_GPIO_NUM 23
#define PCLK_GPIO_NUM 22
// wifi, because I'm too lazy to put it in a different file
#define SSID1 "my_WiFi"
#define PWD1 "myp455w0rd"
//init camera
OV2640 cam;
//init webserver
WebServer server(80);
// ===== rtos task handles =====
// Streaming is implemented with 3 tasks:
// handle client connections to the webserver
TaskHandle_t tMjpeg;
// handle getting picture frames from the camera and storing them locally
TaskHandle_t tCam;
// actually streaming frames to all connected clients
TaskHandle_t tStream;
// frameSync semaphore prevents the streaming buffer from being read while it is being replaced with the next frame
SemaphoreHandle_t frameSync = NULL;
// Queue stores currently connected clients to whom we are streaming
QueueHandle_t streamingClients;
// We will try to achieve a 15 FPS frame rate - acceptable for surveillance
const int FPS = 15;
// We will handle web client requests every 100 ms (10 Hz) - web can wait a bit
const int WSINTERVAL = 100;
// ======== Server Connection Handler Task ==========
void mjpegCB(void* pvParameters) {
TickType_t xLastWakeTime;
const TickType_t xFrequency = pdMS_TO_TICKS(WSINTERVAL);
// Creating frame synchronization semaphore and initializing it
frameSync = xSemaphoreCreateBinary();
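// FreeRTOS creates binary semaphores "empty", so give it once up front;
// otherwise the first xSemaphoreTake() in camCB would block forever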
xSemaphoreGive(frameSync);
// Creating a queue to track all connected clients
streamingClients = xQueueCreate(10, sizeof(WiFiClient*));
//=== Setup section ===
// Creating RTOS task for grabbing frames from the camera
xTaskCreatePinnedToCore(
camCB, // callback
"cam", // name
4096, // stack size
NULL, // parameters
2, // priority
&tCam, // RTOS task handle
APP_CPU); // core
// Creating task to push the stream to all connected clients
xTaskCreatePinnedToCore(
streamCB,
"strmCB",
4 * 1024,
NULL, //(void*) handler,
2,
&tStream,
APP_CPU);
// Registering webserver handling routines
server.on("/mjpeg", HTTP_GET, handleJPGSstream);
server.on("/jpeg", HTTP_GET, handleJPG);
server.onNotFound(handleNotFound);
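// Once running, the stream is at http://<device-ip>/mjpeg and a single
// snapshot at http://<device-ip>/jpeg (open in a browser or fetch with curl)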
// Starting webserver
server.begin();
//=== loop() section ====
xLastWakeTime = xTaskGetTickCount();
for (;;) {
server.handleClient();
// After every server client handling request, we let other tasks run and then pause
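// vTaskDelayUntil() measures the delay from xLastWakeTime rather than from "now",
// which keeps the 10 Hz handling rate steady even if handleClient() takes a while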
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
}
}
// Commonly used variables:
volatile size_t camSize; // size of the current frame, bytes
volatile char* camBuf; // pointer to the current frame
// ==== RTOS task to grab frames from the camera ====
void camCB(void* pvParameters) {
TickType_t xLastWakeTime;
// A running interval associated with currently desired frame rate
const TickType_t xFrequency = pdMS_TO_TICKS(1000 / FPS);
// Mutex for the critical section of switching the active frames around
portMUX_TYPE xSemaphore = portMUX_INITIALIZER_UNLOCKED;
// Pointers to the 2 frames, their respective sizes and index of the current frame
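// (double buffering: one buffer is handed to the streaming task via camBuf
//  while the next frame is copied into the other)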
char* fbs[2] = { NULL, NULL };
size_t fSize[2] = { 0, 0 };
int ifb = 0;
//=== loop() section ===
xLastWakeTime = xTaskGetTickCount();
for (;;) {
// Grab a frame from the camera and query its size
cam.run();
size_t s = cam.getSize();
// If the frame is larger than the buffer we previously allocated, request one-third more (s * 4/3) than the current frame size
if (s > fSize[ifb]) {
fSize[ifb] = s * 4 / 3;
fbs[ifb] = allocateMemory(fbs[ifb], fSize[ifb]);
}
// Copy current frame into local buffer
char* b = (char*)cam.getfb();
memcpy(fbs[ifb], b, s);
// Let other tasks run and wait until the end of the current frame rate interval (if any time left)
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
// Only switch frames around if no frame is currently being streamed to a client
// Wait on a semaphore until client operation completes
xSemaphoreTake(frameSync, portMAX_DELAY);
// Do not allow interrupts while switching the current frame
portENTER_CRITICAL(&xSemaphore);
camBuf = fbs[ifb];
camSize = s;
ifb++;
ifb &= 1; // this should produce a 1, 0, 1, 0, 1 ... sequence
portEXIT_CRITICAL(&xSemaphore);
// Let anyone waiting for a frame know that the frame is ready
xSemaphoreGive(frameSync);
// Technically only needed once: let the streaming task know that we have at least one frame
// and it could start sending frames to the clients, if any
xTaskNotifyGive(tStream);
// Immediately let other (streaming) tasks run
taskYIELD();
// If the streaming task has suspended itself (no active clients to stream to),
// there is no need to grab frames from the camera - suspend this task too to save power
if (eTaskGetState(tStream) == eSuspended) {
vTaskSuspend(NULL); //NULL means "suspend yourself"
}
}
}
// ==== Memory allocator that uses PSRAM if present ====
char* allocateMemory(char* aPtr, size_t aSize) {
// Since current buffer is too small, free it
if (aPtr != NULL) free(aPtr);
size_t freeHeap = ESP.getFreeHeap();
char* ptr = NULL;
// If memory requested is more than 2/3 of the currently free heap, try PSRAM immediately
if (aSize > freeHeap * 2 / 3) {
if (psramFound() && ESP.getFreePsram() > aSize) {
ptr = (char*)ps_malloc(aSize);
}
} else {
// Enough free heap - let's try allocating fast RAM as a buffer
ptr = (char*)malloc(aSize);
// If allocation on the heap failed, let's give PSRAM one more chance:
if (ptr == NULL && psramFound() && ESP.getFreePsram() > aSize) {
ptr = (char*)ps_malloc(aSize);
}
}
// Well, if the memory pointer is NULL, we were not able to allocate any memory, and that is the end. RESTART.
if (ptr == NULL) {
ESP.restart();
}
return ptr;
}
// ==== STREAMING ======
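// The MJPEG stream uses multipart/x-mixed-replace: the HTTP connection stays open
// and each new JPEG frame is sent as its own part (boundary, Content-Type,
// Content-Length, then the image bytes), which the browser renders as live video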
const char HEADER[] = "HTTP/1.1 200 OK\r\n"
"Access-Control-Allow-Origin: *\r\n"
"Content-Type: multipart/x-mixed-replace; boundary=123456789000000000000987654321\r\n";
const char BOUNDARY[] = "\r\n--123456789000000000000987654321\r\n";
const char CTNTTYPE[] = "Content-Type: image/jpeg\r\nContent-Length: ";
const int hdrLen = strlen(HEADER);
const int bdrLen = strlen(BOUNDARY);
const int cntLen = strlen(CTNTTYPE);
// ==== Handle connection request from clients ======
void handleJPGSstream(void) {
// Can only accommodate 10 clients - the length of the streamingClients queue created above
if (!uxQueueSpacesAvailable(streamingClients)) return;
// Create a new WiFi Client object to keep track of this one
WiFiClient* client = new WiFiClient();
*client = server.client();
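// server.client() returns a temporary object; copying it into a heap-allocated
// WiFiClient keeps the TCP connection alive after this handler returns, so the
// streaming task can keep writing to it (streamCB deletes it on disconnect)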
// Immediately send this client a header
client->write(HEADER, hdrLen);
client->write(BOUNDARY, bdrLen);
// Push the client to the streaming queue
xQueueSend(streamingClients, (void*)&client, 0);
// Wake up streaming tasks if they were previously suspended:
if (eTaskGetState(tCam) == eSuspended) vTaskResume(tCam);
if (eTaskGetState(tStream) == eSuspended) vTaskResume(tStream);
}
// ==== Actually stream content to all connected clients ====
void streamCB(void* pvParameters) {
char buf[16];
TickType_t xLastWakeTime;
TickType_t xFrequency;
// Wait until the first frame is captured, so we have something to send
ulTaskNotifyTake(pdTRUE, /* Clear the notification value before exiting. */
portMAX_DELAY); /* Block indefinitely. */
xLastWakeTime = xTaskGetTickCount();
for (;;) {
// Default assumption: we are running according to the FPS
xFrequency = pdMS_TO_TICKS(1000 / FPS);
// Only send anything if there is someone watching
UBaseType_t activeClients = uxQueueMessagesWaiting(streamingClients);
if (activeClients) {
// Adjust the period to the number of connected clients
xFrequency /= activeClients;
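// Each pass through this loop serves exactly one client, so shortening the
// period by the number of clients keeps every viewer at roughly the target FPS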
// Since we are sending the same frame to everyone,
// pop a client from the front of the queue
WiFiClient* client;
xQueueReceive(streamingClients, (void*)&client, 0);
// Check if this client is still connected.
if (!client->connected()) {
// delete this client reference if it has disconnected
// and don't put it back on the queue anymore.
delete client;
} else {
// OK, this is an actively connected client.
// Let's grab a semaphore to prevent frame changes while we are serving the current
xSemaphoreTake(frameSync, portMAX_DELAY);
client->write(CTNTTYPE, cntLen);
sprintf(buf, "%u\r\n\r\n", (unsigned int)camSize); // camSize is size_t, so use an unsigned format
client->write(buf, strlen(buf));
client->write((char*)camBuf, (size_t)camSize);
client->write(BOUNDARY, bdrLen);
// Since this client is still connected, push it to the end
// of the queue for further processing
xQueueSend(streamingClients, (void*)&client, 0);
// The frame has been served. Release the semaphore and let other tasks run.
// If there is a frame switch ready, it will happen now in between frames
xSemaphoreGive(frameSync);
taskYIELD();
}
} else {
// Since there are no connected clients, there is no reason to waste power running
vTaskSuspend(NULL);
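// camCB notices this suspended state and suspends itself too;
// handleJPGSstream resumes both tasks when a new viewer connects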
}
// Let other tasks run after serving every client
taskYIELD();
vTaskDelayUntil(&xLastWakeTime, xFrequency);
}
}
const char JHEADER[] = "HTTP/1.1 200 OK\r\n"
"Content-disposition: inline; filename=capture.jpg\r\n"
"Content-type: image/jpeg\r\n\r\n";
const int jhdLen = strlen(JHEADER);
// ==== Serve up one JPEG frame =========
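// This grabs a fresh frame straight from the camera, independent of the
// double-buffered streaming pipeline above, so it works even with no /mjpeg viewers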
void handleJPG(void) {
WiFiClient client = server.client();
if (!client.connected()) return;
cam.run();
client.write(JHEADER, jhdLen);
client.write((char*)cam.getfb(), cam.getSize());
}
// ==== Handle invalid URL requests =====
void handleNotFound() {
String message = "This camera runs fine, you are asking the wrong question!\n"
                 "You should only ask for /mjpeg or /jpeg here\n\n";
message += "URL: ";
message += server.uri();
message += "\nMethod: ";
message += (server.method() == HTTP_GET) ? "GET" : "POST";
message += "\nArguments: ";
message += server.args();
message += "\n";
server.send(404, "text/plain", message);
}
// we're at the classic setup function
void setup() {
//disable brownout detector
WRITE_PERI_REG(RTC_CNTL_BROWN_OUT_REG, 0);
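// Clearing the brown-out register stops the ESP32 from resetting on the brief
// voltage dips that WiFi transmissions can cause on weak power supplies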
// Configure the camera
camera_config_t config;
config.ledc_channel = LEDC_CHANNEL_0;
config.ledc_timer = LEDC_TIMER_0;
config.pin_d0 = Y2_GPIO_NUM;
config.pin_d1 = Y3_GPIO_NUM;
config.pin_d2 = Y4_GPIO_NUM;
config.pin_d3 = Y5_GPIO_NUM;
config.pin_d4 = Y6_GPIO_NUM;
config.pin_d5 = Y7_GPIO_NUM;
config.pin_d6 = Y8_GPIO_NUM;
config.pin_d7 = Y9_GPIO_NUM;
config.pin_xclk = XCLK_GPIO_NUM;
config.pin_pclk = PCLK_GPIO_NUM;
config.pin_vsync = VSYNC_GPIO_NUM;
config.pin_href = HREF_GPIO_NUM;
config.pin_sscb_sda = SIOD_GPIO_NUM;
config.pin_sscb_scl = SIOC_GPIO_NUM;
config.pin_pwdn = PWDN_GPIO_NUM;
config.pin_reset = RESET_GPIO_NUM;
config.xclk_freq_hz = 20000000;
config.pixel_format = PIXFORMAT_JPEG;
// Frame parameters: UXGA gives the largest frames the OV2640 supports; pick a smaller size below if you need a higher, steadier frame rate
config.frame_size = FRAMESIZE_UXGA;
// config.frame_size = FRAMESIZE_SVGA;
// config.frame_size = FRAMESIZE_VGA;
// config.frame_size = FRAMESIZE_QVGA;
config.jpeg_quality = 12;
config.fb_count = 2;
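// jpeg_quality ranges 0-63, lower means better quality (and larger frames);
// fb_count = 2 lets the camera driver capture the next frame while the previous
// one is still being read out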
if (cam.init(config) != ESP_OK) {
delay(10000);
ESP.restart();
}
// Configure and connect to WiFi
WiFi.mode(WIFI_STA);
WiFi.begin(SSID1, PWD1);
while (WiFi.status() != WL_CONNECTED) {
delay(500);
}
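// mjpegCB spawns the camera and streaming tasks itself and runs the web server,
// so after this the Arduino loop() only needs to keep WiFi alive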
// Start main streaming RTOS task
xTaskCreatePinnedToCore(
mjpegCB,
"mjpeg",
4 * 1024,
NULL,
2,
&tMjpeg,
APP_CPU);
}
// variables for wifi reconnect
unsigned long previousMillis = 0;
unsigned long interval = 30000;
void loop() {
vTaskDelay(1000);
// Check WiFi status
unsigned long currentMillis = millis();
// if WiFi is down, try reconnecting once every interval milliseconds
if ((WiFi.status() != WL_CONNECTED) && (currentMillis - previousMillis >= interval)) {
WiFi.disconnect();
vTaskDelay(1000);
WiFi.reconnect();
previousMillis = currentMillis;
}
}