Working with mqtt

Simon Milvert 2021-01-20 20:36:04 +01:00
parent 738161b0da
commit 61db3d9216
52 changed files with 368 additions and 5846 deletions

esp/.travis.yml Executable file

@ -0,0 +1,67 @@
# Continuous Integration (CI) is the practice, in software
# engineering, of merging all developer working copies with a shared mainline
# several times a day < https://docs.platformio.org/page/ci/index.html >
#
# Documentation:
#
# * Travis CI Embedded Builds with PlatformIO
# < https://docs.travis-ci.com/user/integration/platformio/ >
#
# * PlatformIO integration with Travis CI
# < https://docs.platformio.org/page/ci/travis.html >
#
# * User Guide for `platformio ci` command
# < https://docs.platformio.org/page/userguide/cmd_ci.html >
#
#
# Please choose one of the following templates (proposed below) and uncomment
# it (remove "# " before each line) or use own configuration according to the
# Travis CI documentation (see above).
#
#
# Template #1: General project. Test it using existing `platformio.ini`.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio run
#
# Template #2: The project is intended to be used as a library with examples.
#
# language: python
# python:
# - "2.7"
#
# sudo: false
# cache:
# directories:
# - "~/.platformio"
#
# env:
# - PLATFORMIO_CI_SRC=path/to/test/file.c
# - PLATFORMIO_CI_SRC=examples/file.ino
# - PLATFORMIO_CI_SRC=path/to/test/directory
#
# install:
# - pip install -U platformio
# - platformio update
#
# script:
# - platformio ci --lib="." --board=ID_1 --board=ID_2 --board=ID_N

esp/.vscode/extensions.json vendored Executable file

@ -0,0 +1,7 @@
{
// See http://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"platformio.platformio-ide"
]
}


@ -1,2 +0,0 @@
testserver
octo.jpg


@ -1,7 +0,0 @@
Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@ -1,93 +0,0 @@
# Micro-RTSP
This is a small library which can be used to serve RTSP streams from
resource-constrained MCUs. It lets you trivially build a $10 open-source
RTSP video streaming camera.
# Usage
This library works on ESP32/Arduino targets, but also on almost any POSIX-like platform.
## Example arduino/ESP32 usage
This library will work standalone, but it is _super_ easy to use if your app is PlatformIO-based.
Just run `pio lib install Micro-RTSP` to pull the latest version from the PlatformIO library registry. If you want to use the OV2640
camera support, you'll need to target the espressif32 platform in your project.
See the [example PlatformIO app](/examples). It should build and run on virtually any of the $10
ESP32-CAM boards (such as the M5CAM). The relevant bit of the code is included below. In short:
1. Listen for a TCP connection on the RTSP port with accept()
2. When a connection comes in, create CRtspSession and OV2640Streamer (camera streamer) objects.
3. While the connection remains, call session->handleRequests(0) to handle any incoming client requests.
4. Every 100ms or so call session->broadcastCurrentFrame() to send new frames to any clients.
```
void loop()
{
    uint32_t msecPerFrame = 100;
    static uint32_t lastimage = millis();

    // If we have an active client connection, just service that until gone
    // (FIXME - support multiple simultaneous clients)
    if(session) {
        session->handleRequests(0); // we don't use a timeout here,
                                    // instead we send only if we have new enough frames
        uint32_t now = millis();
        if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
            session->broadcastCurrentFrame(now);
            lastimage = now;

            // check if we are overrunning our max frame rate
            now = millis();
            if(now > lastimage + msecPerFrame)
                printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
        }

        if(session->m_stopped) {
            delete session;
            delete streamer;
            session = NULL;
            streamer = NULL;
        }
    }
    else {
        client = rtspServer.accept();
        if(client) {
            //streamer = new SimStreamer(&client, true);   // our streamer for UDP/TCP based RTP transport
            streamer = new OV2640Streamer(&client, cam);   // our streamer for UDP/TCP based RTP transport
            session = new CRtspSession(&client, streamer); // our threads RTSP session and state
        }
    }
}
```
## Example posix/linux usage
There is a small standalone example [here](/test/RTSPTestServer.cpp). You can build it by following [these](/test/README.md) directions. The usage of the two key classes (CRtspSession and SimStreamer) is very similar to the ESP32 usage.
## Supporting new camera devices
Supporting new camera devices is quite simple. See OV2640Streamer for an example and implement streamImage()
by reading a frame from your camera.
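For illustration only, here is a minimal sketch of such a subclass, modeled on OV2640Streamer; `MyCamera` and its `capture()`, `frameData()`, `frameLength()` and `release()` methods are hypothetical stand-ins for whatever driver your camera actually uses:
```
#include "CStreamer.h"
#include "MyCamera.h" // hypothetical driver for your camera sensor

// Grab one JPEG frame per call and hand it to the base class, which
// packetizes it per RFC 2435 and sends it to all connected clients.
class MyCameraStreamer : public CStreamer
{
    MyCamera &m_cam;
public:
    MyCameraStreamer(MyCamera &cam)
        : CStreamer(cam.width(), cam.height()), m_cam(cam) {}

    virtual void streamImage(uint32_t curMsec)
    {
        m_cam.capture();                                              // read a fresh JPEG frame (hypothetical API)
        streamFrame(m_cam.frameData(), m_cam.frameLength(), curMsec); // packetize and send
        m_cam.release();                                              // return the frame buffer to the driver
    }
};
```
It would then be used like OV2640Streamer in the sources: construct one instance, call addSession() for each incoming RTSP client, and call streamImage() periodically from loop().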
# Structure and design notes
# Issues and sending pull requests
Please report issues and send pull requests. I'll happily reply. ;-)
# Credits
The server code was initially based on a great 2013 [tutorial](https://www.medialan.de/usecase0001.html) by Medialan.
# License
Copyright 2018 S. Kevin Hester-Chow, kevinh@geeksville.com (MIT License)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@ -1,18 +0,0 @@
* add instructions for example app
* push RTSP streams to other servers ( https://github.com/ant-media/Ant-Media-Server/wiki/Getting-Started )
* make stack larger so that the various scratch buffers (currently in bss) can be shared
* cleanup code to a less ugly unified coding standard
* support multiple simultaneous clients on the device
* make octocat test image work again (by changing encoding type from 1 to 0 (422 vs 420))
DONE:
* serve real jpegs (use correct quantization & huffman tables)
* test that both TCP and UDP clients work
* change framerate to something slow
* test remote access
* select a license and put the license into GitHub
* find cause of new mystery pause when starting up in sim mode
* split sim code from real code via inheritance
* use device camera
* package the ESP32-CAM stuff as a library so I can depend on it
* package as a library https://docs.platformio.org/en/latest/librarymanager/creating.html#library-creating-examples


@ -1,3 +0,0 @@
wifikeys.h
.pioenvs
.piolibdeps


@ -1,223 +0,0 @@
#include "OV2640.h"
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
#include "SimStreamer.h"
#include "OV2640Streamer.h"
#include "CRtspSession.h"
// #define ENABLE_OLED // if you want to use an OLED, turn on this macro
// #define SOFTAP_MODE // If you want to run your own softAP, turn this on
#define ENABLE_WEBSERVER
#define ENABLE_RTSPSERVER
#ifdef ENABLE_OLED
#include "SSD1306.h"
#define OLED_ADDRESS 0x3c
#define I2C_SDA 14
#define I2C_SCL 13
SSD1306Wire display(OLED_ADDRESS, I2C_SDA, I2C_SCL, GEOMETRY_128_32);
bool hasDisplay; // we probe for the device at runtime
#endif
OV2640 cam;
#ifdef ENABLE_WEBSERVER
WebServer server(80);
#endif
#ifdef ENABLE_RTSPSERVER
WiFiServer rtspServer(8554);
#endif
#ifdef SOFTAP_MODE
IPAddress apIP = IPAddress(192, 168, 1, 1);
#else
#include "wifikeys.h"
#endif
#ifdef ENABLE_WEBSERVER
void handle_jpg_stream(void)
{
WiFiClient client = server.client();
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n";
server.sendContent(response);
while (1)
{
cam.run();
if (!client.connected())
break;
response = "--frame\r\n";
response += "Content-Type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
server.sendContent("\r\n");
if (!client.connected())
break;
}
}
void handle_jpg(void)
{
WiFiClient client = server.client();
cam.run();
if (!client.connected())
{
return;
}
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-disposition: inline; filename=capture.jpg\r\n";
response += "Content-type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
}
void handleNotFound()
{
String message = "Server is running!\n\n";
message += "URI: ";
message += server.uri();
message += "\nMethod: ";
message += (server.method() == HTTP_GET) ? "GET" : "POST";
message += "\nArguments: ";
message += server.args();
message += "\n";
server.send(200, "text/plain", message);
}
#endif
#ifdef ENABLE_OLED
#define LCD_MESSAGE(msg) lcdMessage(msg)
#else
#define LCD_MESSAGE(msg)
#endif
#ifdef ENABLE_OLED
void lcdMessage(String msg)
{
if(hasDisplay) {
display.clear();
display.drawString(128 / 2, 32 / 2, msg);
display.display();
}
}
#endif
CStreamer *streamer;
void setup()
{
#ifdef ENABLE_OLED
hasDisplay = display.init();
if(hasDisplay) {
display.flipScreenVertically();
display.setFont(ArialMT_Plain_16);
display.setTextAlignment(TEXT_ALIGN_CENTER);
}
#endif
LCD_MESSAGE("booting");
Serial.begin(115200);
while (!Serial)
{
;
}
cam.init(esp32cam_config);
IPAddress ip;
#ifdef SOFTAP_MODE
const char *hostname = "devcam";
// WiFi.hostname(hostname); // FIXME - find out why undefined
LCD_MESSAGE("starting softAP");
WiFi.mode(WIFI_AP);
WiFi.softAPConfig(apIP, apIP, IPAddress(255, 255, 255, 0));
bool result = WiFi.softAP(hostname, "12345678", 1, 0);
if (!result)
{
Serial.println("AP Config failed.");
return;
}
else
{
Serial.println("AP Config Success.");
Serial.print("AP MAC: ");
Serial.println(WiFi.softAPmacAddress());
ip = WiFi.softAPIP();
}
#else
LCD_MESSAGE(String("join ") + ssid);
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(F("."));
}
ip = WiFi.localIP();
Serial.println(F("WiFi connected"));
Serial.println("");
Serial.println(ip);
#endif
LCD_MESSAGE(ip.toString());
#ifdef ENABLE_WEBSERVER
server.on("/", HTTP_GET, handle_jpg_stream);
server.on("/jpg", HTTP_GET, handle_jpg);
server.onNotFound(handleNotFound);
server.begin();
#endif
#ifdef ENABLE_RTSPSERVER
rtspServer.begin();
//streamer = new SimStreamer(true); // our streamer for UDP/TCP based RTP transport
streamer = new OV2640Streamer(cam); // our streamer for UDP/TCP based RTP transport
#endif
}
void loop()
{
#ifdef ENABLE_WEBSERVER
server.handleClient();
#endif
#ifdef ENABLE_RTSPSERVER
uint32_t msecPerFrame = 100;
static uint32_t lastimage = millis();
// If we have an active client connection, just service that until gone
streamer->handleRequests(0); // we don't use a timeout here,
// instead we send only if we have new enough frames
uint32_t now = millis();
if(streamer->anySessions()) {
if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
streamer->streamImage(now);
lastimage = now;
// check if we are overrunning our max frame rate
now = millis();
if(now > lastimage + msecPerFrame) {
printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
}
}
}
WiFiClient rtspClient = rtspServer.accept();
if(rtspClient) {
Serial.print("client: ");
Serial.print(rtspClient.remoteIP());
Serial.println();
streamer->addSession(rtspClient);
}
#endif
}


@ -1,15 +0,0 @@
; PlatformIO Project Configuration File
;
; Build options: build flags, source filter
; Upload options: custom upload port, speed and extra flags
; Library options: dependencies, extra library storages
; Advanced options: extra scripting
;
; Please visit documentation for the other options and examples
; https://docs.platformio.org/page/projectconf.html
[env:m5stack-core-esp32]
platform = espressif32@>=1.6.0
board = m5stack-core-esp32
framework = arduino
lib_deps = Micro-RTSP


@ -1,3 +0,0 @@
// copy this file to wifikeys.h and edit
const char *ssid = "S O S"; // Put your SSID here
const char *password = "SandraArSnygg"; // Put your PASSWORD here


@ -1,3 +0,0 @@
// copy this file to wifikeys.h and edit
const char *ssid = "YOURNETHERE"; // Put your SSID here
const char *password = "YOURPASSWORDHERE"; // Put your PASSWORD here


@ -1,22 +0,0 @@
{
"name": "Micro-RTSP",
"keywords": "esp32, camera, esp32-cam, rtsp",
"description": "A small/efficient RTSP server for ESP32 and other micros",
"repository":
{
"type": "git",
"url": "https://github.com/geeksville/Micro-RTSP.git"
},
"authors":
[
{
"name": "Kevin Hester",
"email": "kevinh@geeksville.com",
"url": "https://github.com/geeksville",
"maintainer": true
}
],
"version": "0.1.6",
"frameworks": "arduino",
"platforms": "*"
}


@ -1,9 +0,0 @@
name=Micro-RTSP
version=0.1.6
author=Kevin Hester
maintainer=Kevin Hester <kevinh@geeksville.com>
sentence=Mikro RTSP server for mikros
paragraph=A small/efficient RTSP server for ESP32 and other micros
category=Data Storage
url=https://github.com/geeksville/Micro-RTSP.git
architectures=*


@ -1,422 +0,0 @@
#include "CRtspSession.h"
#include <stdio.h>
#include <time.h>
CRtspSession::CRtspSession(WiFiClient& aClient, CStreamer * aStreamer) : LinkedListElement(aStreamer->getClientsListHead()),
m_Client(aClient),
m_Streamer(aStreamer)
{
printf("Creating RTSP session\n");
Init();
m_RtspClient = &m_Client;
m_RtspSessionID = getRandom(); // create a session ID
m_RtspSessionID |= 0x80000000;
m_StreamID = -1;
m_ClientRTPPort = 0;
m_ClientRTCPPort = 0;
m_TcpTransport = false;
m_streaming = false;
m_stopped = false;
m_RtpClientPort = 0;
m_RtcpClientPort = 0;
};
CRtspSession::~CRtspSession()
{
m_Streamer->ReleaseUdpTransport();
closesocket(m_RtspClient);
};
void CRtspSession::Init()
{
m_RtspCmdType = RTSP_UNKNOWN;
memset(m_URLPreSuffix, 0x00, sizeof(m_URLPreSuffix));
memset(m_URLSuffix, 0x00, sizeof(m_URLSuffix));
memset(m_CSeq, 0x00, sizeof(m_CSeq));
memset(m_URLHostPort, 0x00, sizeof(m_URLHostPort));
m_ContentLength = 0;
};
bool CRtspSession::ParseRtspRequest(char const * aRequest, unsigned aRequestSize)
{
char CmdName[RTSP_PARAM_STRING_MAX];
static char CurRequest[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
unsigned CurRequestSize;
Init();
CurRequestSize = aRequestSize;
memcpy(CurRequest,aRequest,aRequestSize);
// check whether the request contains information about the RTP/RTCP UDP client ports (SETUP command)
char * ClientPortPtr;
char * TmpPtr;
static char CP[1024];
char * pCP;
ClientPortPtr = strstr(CurRequest,"client_port");
if (ClientPortPtr != nullptr)
{
TmpPtr = strstr(ClientPortPtr,"\r\n");
if (TmpPtr != nullptr)
{
TmpPtr[0] = 0x00;
strcpy(CP,ClientPortPtr);
pCP = strstr(CP,"=");
if (pCP != nullptr)
{
pCP++;
strcpy(CP,pCP);
pCP = strstr(CP,"-");
if (pCP != nullptr)
{
pCP[0] = 0x00;
m_ClientRTPPort = atoi(CP);
m_ClientRTCPPort = m_ClientRTPPort + 1;
};
};
};
};
// Read everything up to the first space as the command name
bool parseSucceeded = false;
unsigned i;
for (i = 0; i < sizeof(CmdName)-1 && i < CurRequestSize; ++i)
{
char c = CurRequest[i];
if (c == ' ' || c == '\t')
{
parseSucceeded = true;
break;
}
CmdName[i] = c;
}
CmdName[i] = '\0';
if (!parseSucceeded) {
printf("failed to parse RTSP\n");
return false;
}
printf("RTSP received %s\n", CmdName);
// find out the command type
if (strstr(CmdName,"OPTIONS") != nullptr) m_RtspCmdType = RTSP_OPTIONS; else
if (strstr(CmdName,"DESCRIBE") != nullptr) m_RtspCmdType = RTSP_DESCRIBE; else
if (strstr(CmdName,"SETUP") != nullptr) m_RtspCmdType = RTSP_SETUP; else
if (strstr(CmdName,"PLAY") != nullptr) m_RtspCmdType = RTSP_PLAY; else
if (strstr(CmdName,"TEARDOWN") != nullptr) m_RtspCmdType = RTSP_TEARDOWN;
// check whether the request contains transport information (UDP or TCP)
if (m_RtspCmdType == RTSP_SETUP)
{
TmpPtr = strstr(CurRequest,"RTP/AVP/TCP");
if (TmpPtr != nullptr) m_TcpTransport = true; else m_TcpTransport = false;
};
// Skip over the prefix of any "rtsp://" or "rtsp:/" URL that follows:
unsigned j = i+1;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j; // skip over any additional white space
for (; (int)j < (int)(CurRequestSize-8); ++j)
{
if ((CurRequest[j] == 'r' || CurRequest[j] == 'R') &&
(CurRequest[j+1] == 't' || CurRequest[j+1] == 'T') &&
(CurRequest[j+2] == 's' || CurRequest[j+2] == 'S') &&
(CurRequest[j+3] == 'p' || CurRequest[j+3] == 'P') &&
CurRequest[j+4] == ':' && CurRequest[j+5] == '/')
{
j += 6;
if (CurRequest[j] == '/')
{ // This is a "rtsp://" URL; skip over the host:port part that follows:
++j;
unsigned uidx = 0;
while (j < CurRequestSize && CurRequest[j] != '/' && CurRequest[j] != ' ' && uidx < sizeof(m_URLHostPort) - 1)
{ // extract the host:port part of the URL here
m_URLHostPort[uidx] = CurRequest[j];
uidx++;
++j;
};
}
else --j;
i = j;
break;
}
}
// Look for the URL suffix (before the following "RTSP/"):
parseSucceeded = false;
for (unsigned k = i+1; (int)k < (int)(CurRequestSize-5); ++k)
{
if (CurRequest[k] == 'R' && CurRequest[k+1] == 'T' &&
CurRequest[k+2] == 'S' && CurRequest[k+3] == 'P' &&
CurRequest[k+4] == '/')
{
while (--k >= i && CurRequest[k] == ' ') {}
unsigned k1 = k;
while (k1 > i && CurRequest[k1] != '/') --k1;
if (k - k1 + 1 > sizeof(m_URLSuffix)) return false;
unsigned n = 0, k2 = k1+1;
while (k2 <= k) m_URLSuffix[n++] = CurRequest[k2++];
m_URLSuffix[n] = '\0';
if (k1 - i > sizeof(m_URLPreSuffix)) return false;
n = 0; k2 = i + 1;
while (k2 <= k1 - 1) m_URLPreSuffix[n++] = CurRequest[k2++];
m_URLPreSuffix[n] = '\0';
i = k + 7;
parseSucceeded = true;
break;
}
}
if (!parseSucceeded) return false;
// Look for "CSeq:", skip whitespace, then read everything up to the next \r or \n as 'CSeq':
parseSucceeded = false;
for (j = i; (int)j < (int)(CurRequestSize-5); ++j)
{
if (CurRequest[j] == 'C' && CurRequest[j+1] == 'S' &&
CurRequest[j+2] == 'e' && CurRequest[j+3] == 'q' &&
CurRequest[j+4] == ':')
{
j += 5;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j;
unsigned n;
for (n = 0; n < sizeof(m_CSeq)-1 && j < CurRequestSize; ++n,++j)
{
char c = CurRequest[j];
if (c == '\r' || c == '\n')
{
parseSucceeded = true;
break;
}
m_CSeq[n] = c;
}
m_CSeq[n] = '\0';
break;
}
}
if (!parseSucceeded) return false;
// Also: Look for "Content-Length:" (optional)
for (j = i; (int)j < (int)(CurRequestSize-15); ++j)
{
if (CurRequest[j] == 'C' && CurRequest[j+1] == 'o' &&
CurRequest[j+2] == 'n' && CurRequest[j+3] == 't' &&
CurRequest[j+4] == 'e' && CurRequest[j+5] == 'n' &&
CurRequest[j+6] == 't' && CurRequest[j+7] == '-' &&
(CurRequest[j+8] == 'L' || CurRequest[j+8] == 'l') &&
CurRequest[j+9] == 'e' && CurRequest[j+10] == 'n' &&
CurRequest[j+11] == 'g' && CurRequest[j+12] == 't' &&
CurRequest[j+13] == 'h' && CurRequest[j+14] == ':')
{
j += 15;
while (j < CurRequestSize && (CurRequest[j] == ' ' || CurRequest[j] == '\t')) ++j;
unsigned num;
if (sscanf(&CurRequest[j], "%u", &num) == 1) m_ContentLength = num;
}
}
return true;
};
RTSP_CMD_TYPES CRtspSession::Handle_RtspRequest(char const * aRequest, unsigned aRequestSize)
{
if (ParseRtspRequest(aRequest,aRequestSize))
{
switch (m_RtspCmdType)
{
case RTSP_OPTIONS: { Handle_RtspOPTION(); break; };
case RTSP_DESCRIBE: { Handle_RtspDESCRIBE(); break; };
case RTSP_SETUP: { Handle_RtspSETUP(); break; };
case RTSP_PLAY: { Handle_RtspPLAY(); break; };
default: {};
};
};
return m_RtspCmdType;
};
void CRtspSession::Handle_RtspOPTION()
{
static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"Public: DESCRIBE, SETUP, TEARDOWN, PLAY, PAUSE\r\n\r\n",m_CSeq);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::Handle_RtspDESCRIBE()
{
static char Response[1024]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
static char SDPBuf[1024];
static char URLBuf[1024];
// check whether we know a stream with the URL which is requested
m_StreamID = -1; // invalid URL
if ((strcmp(m_URLPreSuffix,"mjpeg") == 0) && (strcmp(m_URLSuffix,"1") == 0)) m_StreamID = 0; else
if ((strcmp(m_URLPreSuffix,"mjpeg") == 0) && (strcmp(m_URLSuffix,"2") == 0)) m_StreamID = 1;
if (m_StreamID == -1)
{ // Stream not available
snprintf(Response,sizeof(Response),
"RTSP/1.0 404 Stream Not Found\r\nCSeq: %s\r\n%s\r\n",
m_CSeq,
DateHeader());
socketsend(m_RtspClient,Response,strlen(Response));
return;
};
// simulate DESCRIBE server response
static char OBuf[256];
char * ColonPtr;
strcpy(OBuf,m_URLHostPort);
ColonPtr = strstr(OBuf,":");
if (ColonPtr != nullptr) ColonPtr[0] = 0x00;
snprintf(SDPBuf,sizeof(SDPBuf),
"v=0\r\n"
"o=- %d 1 IN IP4 %s\r\n"
"s=\r\n"
"t=0 0\r\n" // start / stop - 0 -> unbounded and permanent session
"m=video 0 RTP/AVP 26\r\n" // currently we just handle UDP sessions
// "a=x-dimensions: 640,480\r\n"
"c=IN IP4 0.0.0.0\r\n",
rand(),
OBuf);
char StreamName[64];
switch (m_StreamID)
{
case 0: strcpy(StreamName,"mjpeg/1"); break;
case 1: strcpy(StreamName,"mjpeg/2"); break;
};
snprintf(URLBuf,sizeof(URLBuf),
"rtsp://%s/%s",
m_URLHostPort,
StreamName);
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Content-Base: %s/\r\n"
"Content-Type: application/sdp\r\n"
"Content-Length: %d\r\n\r\n"
"%s",
m_CSeq,
DateHeader(),
URLBuf,
(int) strlen(SDPBuf),
SDPBuf);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::InitTransport(u_short aRtpPort, u_short aRtcpPort)
{
m_RtpClientPort = aRtpPort;
m_RtcpClientPort = aRtcpPort;
if (!m_TcpTransport)
{ // allocate port pairs for RTP/RTCP ports in UDP transport mode
m_Streamer->InitUdpTransport();
};
};
void CRtspSession::Handle_RtspSETUP()
{
static char Response[1024];
static char Transport[255];
// init RTSP Session transport type (UDP or TCP) and ports for UDP transport
InitTransport(m_ClientRTPPort,m_ClientRTCPPort);
// simulate SETUP server response
if (m_TcpTransport)
snprintf(Transport,sizeof(Transport),"RTP/AVP/TCP;unicast;interleaved=0-1");
else
snprintf(Transport,sizeof(Transport),
"RTP/AVP;unicast;destination=127.0.0.1;source=127.0.0.1;client_port=%i-%i;server_port=%i-%i",
m_ClientRTPPort,
m_ClientRTCPPort,
m_Streamer->GetRtpServerPort(),
m_Streamer->GetRtcpServerPort());
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Transport: %s\r\n"
"Session: %i\r\n\r\n",
m_CSeq,
DateHeader(),
Transport,
m_RtspSessionID);
socketsend(m_RtspClient,Response,strlen(Response));
}
void CRtspSession::Handle_RtspPLAY()
{
static char Response[1024];
// simulate PLAY server response
snprintf(Response,sizeof(Response),
"RTSP/1.0 200 OK\r\nCSeq: %s\r\n"
"%s\r\n"
"Range: npt=0.000-\r\n"
"Session: %i\r\n"
"RTP-Info: url=rtsp://127.0.0.1:8554/mjpeg/1/track1\r\n\r\n",
m_CSeq,
DateHeader(),
m_RtspSessionID);
socketsend(m_RtspClient,Response,strlen(Response));
}
char const * CRtspSession::DateHeader()
{
static char buf[200];
time_t tt = time(NULL);
strftime(buf, sizeof buf, "Date: %a, %b %d %Y %H:%M:%S GMT", gmtime(&tt));
return buf;
}
int CRtspSession::GetStreamID()
{
return m_StreamID;
};
/**
Read from our socket, parsing commands as possible.
*/
bool CRtspSession::handleRequests(uint32_t readTimeoutMs)
{
if(m_stopped)
return false; // Already closed down
static char RecvBuf[RTSP_BUFFER_SIZE]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
memset(RecvBuf,0x00,sizeof(RecvBuf));
int res = socketread(m_RtspClient,RecvBuf,sizeof(RecvBuf), readTimeoutMs);
if(res > 0) {
// we filter away everything which seems not to be an RTSP command: O-ption, D-escribe, S-etup, P-lay, T-eardown
if ((RecvBuf[0] == 'O') || (RecvBuf[0] == 'D') || (RecvBuf[0] == 'S') || (RecvBuf[0] == 'P') || (RecvBuf[0] == 'T'))
{
RTSP_CMD_TYPES C = Handle_RtspRequest(RecvBuf,res);
if (C == RTSP_PLAY)
m_streaming = true;
else if (C == RTSP_TEARDOWN)
m_stopped = true;
}
return true;
}
else if(res == 0) {
printf("client closed socket, exiting\n");
m_stopped = true;
return true;
}
else {
// Timeout on read
return false;
}
}


@ -1,79 +0,0 @@
#pragma once
#include "LinkedListElement.h"
#include "CStreamer.h"
#include "platglue.h"
// supported command types
enum RTSP_CMD_TYPES
{
RTSP_OPTIONS,
RTSP_DESCRIBE,
RTSP_SETUP,
RTSP_PLAY,
RTSP_TEARDOWN,
RTSP_UNKNOWN
};
#define RTSP_BUFFER_SIZE 10000 // for incoming requests, and outgoing responses
#define RTSP_PARAM_STRING_MAX 200
#define MAX_HOSTNAME_LEN 256
class CRtspSession : public LinkedListElement
{
public:
CRtspSession(WiFiClient& aRtspClient, CStreamer * aStreamer);
~CRtspSession();
RTSP_CMD_TYPES Handle_RtspRequest(char const * aRequest, unsigned aRequestSize);
int GetStreamID();
/**
Read from our socket, parsing commands as possible.
return false if the read timed out
*/
bool handleRequests(uint32_t readTimeoutMs);
bool m_streaming;
bool m_stopped;
void InitTransport(u_short aRtpPort, u_short aRtcpPort);
bool isTcpTransport() { return m_TcpTransport; }
SOCKET& getClient() { return m_RtspClient; }
uint16_t getRtpClientPort() { return m_RtpClientPort; }
private:
void Init();
bool ParseRtspRequest(char const * aRequest, unsigned aRequestSize);
char const * DateHeader();
// RTSP request command handlers
void Handle_RtspOPTION();
void Handle_RtspDESCRIBE();
void Handle_RtspSETUP();
void Handle_RtspPLAY();
// global session state parameters
int m_RtspSessionID;
WiFiClient m_Client;
SOCKET m_RtspClient; // RTSP socket of that session
int m_StreamID; // number of simulated stream of that session
IPPORT m_ClientRTPPort; // client port for UDP based RTP transport
IPPORT m_ClientRTCPPort; // client port for UDP based RTCP transport
bool m_TcpTransport; // if Tcp based streaming was activated
CStreamer * m_Streamer; // the UDP or TCP streamer of that session
// parameters of the last received RTSP request
RTSP_CMD_TYPES m_RtspCmdType; // command type (if any) of the current request
char m_URLPreSuffix[RTSP_PARAM_STRING_MAX]; // stream name pre suffix
char m_URLSuffix[RTSP_PARAM_STRING_MAX]; // stream name suffix
char m_CSeq[RTSP_PARAM_STRING_MAX]; // RTSP command sequence number
char m_URLHostPort[MAX_HOSTNAME_LEN]; // host:port part of the URL
unsigned m_ContentLength; // SDP string size
uint16_t m_RtpClientPort; // RTP receiver port on client (in host byte order!)
uint16_t m_RtcpClientPort; // RTCP receiver port on client (in host byte order!)
};


@ -1,412 +0,0 @@
#include "CStreamer.h"
#include "CRtspSession.h"
#include <stdio.h>
CStreamer::CStreamer(u_short width, u_short height) : m_Clients()
{
printf("Creating TSP streamer\n");
m_RtpServerPort = 0;
m_RtcpServerPort = 0;
m_SequenceNumber = 0;
m_Timestamp = 0;
m_SendIdx = 0;
m_RtpSocket = NULLSOCKET;
m_RtcpSocket = NULLSOCKET;
m_width = width;
m_height = height;
m_prevMsec = 0;
m_udpRefCount = 0;
};
CStreamer::~CStreamer()
{
LinkedListElement* element = m_Clients.m_Next;
CRtspSession* session = NULL;
while (element != &m_Clients)
{
session = static_cast<CRtspSession*>(element);
element = element->m_Next;
delete session;
}
};
void CStreamer::addSession(WiFiClient& aClient)
{
// printf("CStreamer::addSession\n");
CRtspSession* session = new CRtspSession(aClient, this); // our threads RTSP session and state
// we have it stored in m_Clients
}
int CStreamer::SendRtpPacket(unsigned const char * jpeg, int jpegLen, int fragmentOffset, BufPtr quant0tbl, BufPtr quant1tbl)
{
// printf("CStreamer::SendRtpPacket offset:%d - begin\n", fragmentOffset);
#define KRtpHeaderSize 12 // size of the RTP header
#define KJpegHeaderSize 8 // size of the special JPEG payload header
#define MAX_FRAGMENT_SIZE 1100 // FIXME, pick more carefully
int fragmentLen = MAX_FRAGMENT_SIZE;
if(fragmentLen + fragmentOffset > jpegLen) // Shrink last fragment if needed
fragmentLen = jpegLen - fragmentOffset;
bool isLastFragment = (fragmentOffset + fragmentLen) == jpegLen;
if (!m_Clients.NotEmpty())
{
return isLastFragment ? 0 : fragmentOffset;
}
// Do we have custom quant tables? If so include them per RFC
bool includeQuantTbl = quant0tbl && quant1tbl && fragmentOffset == 0;
uint8_t q = includeQuantTbl ? 128 : 0x5e;
static char RtpBuf[2048]; // Note: we assume single threaded, this large buf we keep off of the tiny stack
int RtpPacketSize = fragmentLen + KRtpHeaderSize + KJpegHeaderSize + (includeQuantTbl ? (4 + 64 * 2) : 0);
memset(RtpBuf,0x00,sizeof(RtpBuf));
// Prepare the first 4 byte of the packet. This is the Rtp over Rtsp header in case of TCP based transport
RtpBuf[0] = '$'; // magic number
RtpBuf[1] = 0; // number of the multiplexed subchannel on the RTSP connection - here the RTP channel
RtpBuf[2] = (RtpPacketSize & 0x0000FF00) >> 8;
RtpBuf[3] = (RtpPacketSize & 0x000000FF);
// Prepare the 12 byte RTP header
RtpBuf[4] = 0x80; // RTP version
RtpBuf[5] = 0x1a | (isLastFragment ? 0x80 : 0x00); // JPEG payload (26) and marker bit
RtpBuf[7] = m_SequenceNumber & 0x0FF; // each packet is counted with a sequence counter
RtpBuf[6] = m_SequenceNumber >> 8;
RtpBuf[8] = (m_Timestamp & 0xFF000000) >> 24; // each image gets a timestamp
RtpBuf[9] = (m_Timestamp & 0x00FF0000) >> 16;
RtpBuf[10] = (m_Timestamp & 0x0000FF00) >> 8;
RtpBuf[11] = (m_Timestamp & 0x000000FF);
RtpBuf[12] = 0x13; // 4 byte SSRC (synchronization source identifier)
RtpBuf[13] = 0xf9; // we just use an arbitrary number here to keep it simple
RtpBuf[14] = 0x7e;
RtpBuf[15] = 0x67;
// Prepare the 8 byte payload JPEG header
RtpBuf[16] = 0x00; // type specific
RtpBuf[17] = (fragmentOffset & 0x00FF0000) >> 16; // 3 byte fragmentation offset for fragmented images
RtpBuf[18] = (fragmentOffset & 0x0000FF00) >> 8;
RtpBuf[19] = (fragmentOffset & 0x000000FF);
/* These sampling factors indicate that the chrominance components of
type 0 video is downsampled horizontally by 2 (often called 4:2:2)
while the chrominance components of type 1 video are downsampled both
horizontally and vertically by 2 (often called 4:2:0). */
RtpBuf[20] = 0x00; // type (fixme might be wrong for camera data) https://tools.ietf.org/html/rfc2435
RtpBuf[21] = q; // quality scale factor was 0x5e
RtpBuf[22] = m_width / 8; // width / 8
RtpBuf[23] = m_height / 8; // height / 8
int headerLen = 24; // Including the JPEG header but not the quant table header
if(includeQuantTbl) { // we need a quant header - but only in first packet of the frame
//printf("inserting quanttbl\n");
RtpBuf[24] = 0; // MBZ
RtpBuf[25] = 0; // 8 bit precision
RtpBuf[26] = 0; // MSB of length
int numQantBytes = 64; // Two 64 byte tables
RtpBuf[27] = 2 * numQantBytes; // LSB of length
headerLen += 4;
memcpy(RtpBuf + headerLen, quant0tbl, numQantBytes);
headerLen += numQantBytes;
memcpy(RtpBuf + headerLen, quant1tbl, numQantBytes);
headerLen += numQantBytes;
}
// printf("Sending timestamp %d, seq %d, fragoff %d, fraglen %d, jpegLen %d\n", m_Timestamp, m_SequenceNumber, fragmentOffset, fragmentLen, jpegLen);
// append the JPEG scan data to the RTP buffer
memcpy(RtpBuf + headerLen,jpeg + fragmentOffset, fragmentLen);
fragmentOffset += fragmentLen;
m_SequenceNumber++; // prepare the packet counter for the next packet
IPADDRESS otherip;
IPPORT otherport;
// RTP marker bit must be set on last fragment
LinkedListElement* element = m_Clients.m_Next;
CRtspSession* session = NULL;
while (element != &m_Clients)
{
session = static_cast<CRtspSession*>(element);
if (session->m_streaming && !session->m_stopped) {
if (session->isTcpTransport()) // RTP over RTSP - we send the buffer + 4 byte additional header
socketsend(session->getClient(),RtpBuf,RtpPacketSize + 4);
else // UDP - we send just the buffer by skipping the 4 byte RTP over RTSP header
{
socketpeeraddr(session->getClient(), &otherip, &otherport);
udpsocketsend(m_RtpSocket,&RtpBuf[4],RtpPacketSize, otherip, session->getRtpClientPort());
}
}
element = element->m_Next;
}
// printf("CStreamer::SendRtpPacket offset:%d - end\n", fragmentOffset);
return isLastFragment ? 0 : fragmentOffset;
};
u_short CStreamer::GetRtpServerPort()
{
return m_RtpServerPort;
};
u_short CStreamer::GetRtcpServerPort()
{
return m_RtcpServerPort;
};
bool CStreamer::InitUdpTransport(void)
{
if (m_udpRefCount != 0)
{
++m_udpRefCount;
return true;
}
for (u_short P = 6970; P < 0xFFFE; P += 2)
{
m_RtpSocket = udpsocketcreate(P);
if (m_RtpSocket)
{ // RTP socket was bound successfully. Let's try to bind the consecutive RTCP socket
m_RtcpSocket = udpsocketcreate(P + 1);
if (m_RtcpSocket)
{
m_RtpServerPort = P;
m_RtcpServerPort = P+1;
break;
}
else
{
udpsocketclose(m_RtpSocket);
udpsocketclose(m_RtcpSocket);
};
}
};
++m_udpRefCount;
return true;
}
void CStreamer::ReleaseUdpTransport(void)
{
--m_udpRefCount;
if (m_udpRefCount == 0)
{
m_RtpServerPort = 0;
m_RtcpServerPort = 0;
udpsocketclose(m_RtpSocket);
udpsocketclose(m_RtcpSocket);
m_RtpSocket = NULLSOCKET;
m_RtcpSocket = NULLSOCKET;
}
}
/**
Call handleRequests on all sessions
*/
bool CStreamer::handleRequests(uint32_t readTimeoutMs)
{
bool retVal = true;
LinkedListElement* element = m_Clients.m_Next;
while(element != &m_Clients)
{
CRtspSession* session = static_cast<CRtspSession*>(element);
retVal &= session->handleRequests(readTimeoutMs);
element = element->m_Next;
if (session->m_stopped)
{
// remove the session here, so we won't have to send to it
delete session;
}
}
return retVal;
}
void CStreamer::streamFrame(unsigned const char *data, uint32_t dataLen, uint32_t curMsec)
{
if(m_prevMsec == 0) // first frame init our timestamp
m_prevMsec = curMsec;
// compute deltat (being careful to handle clock rollover with a little lie)
uint32_t deltams = (curMsec >= m_prevMsec) ? curMsec - m_prevMsec : 100;
m_prevMsec = curMsec;
// locate quant tables if possible
BufPtr qtable0, qtable1;
if(!decodeJPEGfile(&data, &dataLen, &qtable0, &qtable1)) {
printf("can't decode jpeg data\n");
return;
}
int offset = 0;
do {
offset = SendRtpPacket(data, dataLen, offset, qtable0, qtable1);
} while(offset != 0);
// Increment ONLY after a full frame
uint32_t units = 90000; // Hz per RFC 2435
m_Timestamp += (units * deltams / 1000); // advance the 90 kHz RTP timestamp by the elapsed time
m_SendIdx++;
if (m_SendIdx > 1) m_SendIdx = 0;
};
#include <assert.h>
// search for a particular JPEG marker; moves *start to just after that marker,
// shrinks *len by the number of bytes skipped, and returns true if the marker was found
// APP0 e0
// DQT db
// DQT db
// DHT c4
// DHT c4
// DHT c4
// DHT c4
// SOF0 c0 baseline (not progressive) 3 color 0x01 Y, 0x21 2h1v, 0x00 tbl0
// - 0x02 Cb, 0x11 1h1v, 0x01 tbl1 - 0x03 Cr, 0x11 1h1v, 0x01 tbl1
// therefore 4:2:2, with two separate quant tables (0 and 1)
// SOS da
// EOI d9 (no need to strip data after this RFC says client will discard)
bool findJPEGheader(BufPtr *start, uint32_t *len, uint8_t marker) {
// per https://en.wikipedia.org/wiki/JPEG_File_Interchange_Format
unsigned const char *bytes = *start;
// kinda skanky, will break if unlucky and the headers include 0xffda
// might fall off array if jpeg is invalid
// FIXME - return false instead
while(bytes - *start < *len) {
uint8_t framing = *bytes++; // better be 0xff
if(framing != 0xff) {
printf("malformed jpeg, framing=%x\n", framing);
return false;
}
uint8_t typecode = *bytes++;
if(typecode == marker) {
unsigned skipped = bytes - *start;
//printf("found marker 0x%x, skipped %d\n", marker, skipped);
*start = bytes;
// shrink len for the bytes we just skipped
*len -= skipped;
return true;
}
else {
// not the section we were looking for, skip the entire section
switch(typecode) {
case 0xd8: // start of image
{
break; // no data to skip
}
case 0xe0: // app0
case 0xdb: // dqt
case 0xc4: // dht
case 0xc0: // sof0
case 0xda: // sos
{
// standard format section with 2 bytes for len. skip that many bytes
uint32_t len = bytes[0] * 256 + bytes[1];
//printf("skipping section 0x%x, %d bytes\n", typecode, len);
bytes += len;
break;
}
default:
printf("unexpected jpeg typecode 0x%x\n", typecode);
break;
}
}
}
printf("failed to find jpeg marker 0x%x", marker);
return false;
}
// the scan data uses byte stuffing to guarantee anything that starts with 0xff
// followed by something not zero, is a new section. Look for that marker and return the ptr
// pointing there
void skipScanBytes(BufPtr *start) {
BufPtr bytes = *start;
while(true) { // FIXME, check against length
while(*bytes++ != 0xff);
if(*bytes++ != 0) {
*start = bytes - 2; // back up to the 0xff marker we just found
return;
}
}
}
void nextJpegBlock(BufPtr *bytes) {
uint32_t len = (*bytes)[0] * 256 + (*bytes)[1];
//printf("going to next jpeg block %d bytes\n", len);
*bytes += len;
}
// When JPEG is stored as a file it is wrapped in a container
// This function fixes up the provided start ptr to point to the
// actual JPEG stream data and returns the number of bytes skipped
bool decodeJPEGfile(BufPtr *start, uint32_t *len, BufPtr *qtable0, BufPtr *qtable1) {
// per https://en.wikipedia.org/wiki/JPEG_File_Interchange_Format
unsigned const char *bytes = *start;
if(!findJPEGheader(&bytes, len, 0xd8)) // better at least look like a jpeg file
return false; // FAILED!
// Look for quant tables if they are present
*qtable0 = NULL;
*qtable1 = NULL;
BufPtr quantstart = *start;
uint32_t quantlen = *len;
if(!findJPEGheader(&quantstart, &quantlen, 0xdb)) {
printf("error can't find quant table 0\n");
}
else {
// printf("found quant table %x\n", quantstart[2]);
*qtable0 = quantstart + 3; // 3 bytes of header skipped
nextJpegBlock(&quantstart);
if(!findJPEGheader(&quantstart, &quantlen, 0xdb)) {
printf("error can't find quant table 1\n");
}
else {
// printf("found quant table %x\n", quantstart[2]);
}
*qtable1 = quantstart + 3;
nextJpegBlock(&quantstart);
}
if(!findJPEGheader(start, len, 0xda))
return false; // FAILED!
// Skip the header bytes of the SOS marker FIXME why doesn't this work?
uint32_t soslen = (*start)[0] * 256 + (*start)[1];
*start += soslen;
*len -= soslen;
// start scanning the data portion of the scan to find the end marker
BufPtr endmarkerptr = *start;
uint32_t endlen = *len;
skipScanBytes(&endmarkerptr);
if(!findJPEGheader(&endmarkerptr, &endlen, 0xd9))
return false; // FAILED!
// endlen must now be the # of bytes between the start of our scan and
// the end marker, tell the caller to ignore bytes afterwards
*len = endmarkerptr - *start;
return true;
}


@ -1,65 +0,0 @@
#pragma once
#include "platglue.h"
#include "LinkedListElement.h"
typedef unsigned const char *BufPtr;
class CStreamer
{
public:
CStreamer(u_short width, u_short height);
virtual ~CStreamer();
void addSession(WiFiClient& aClient);
LinkedListElement* getClientsListHead() { return &m_Clients; }
int anySessions() { return m_Clients.NotEmpty(); }
bool handleRequests(uint32_t readTimeoutMs);
u_short GetRtpServerPort();
u_short GetRtcpServerPort();
virtual void streamImage(uint32_t curMsec) = 0; // send a new image to the client
bool InitUdpTransport(void);
void ReleaseUdpTransport(void);
protected:
void streamFrame(unsigned const char *data, uint32_t dataLen, uint32_t curMsec);
private:
int SendRtpPacket(unsigned const char *jpeg, int jpegLen, int fragmentOffset, BufPtr quant0tbl = NULL, BufPtr quant1tbl = NULL);// returns new fragmentOffset or 0 if finished with frame
UDPSOCKET m_RtpSocket; // RTP socket for streaming RTP packets to client
UDPSOCKET m_RtcpSocket; // RTCP socket for sending/receiving RTCP packages
IPPORT m_RtpServerPort; // RTP sender port on server
IPPORT m_RtcpServerPort; // RTCP sender port on server
u_short m_SequenceNumber;
uint32_t m_Timestamp;
int m_SendIdx;
LinkedListElement m_Clients;
uint32_t m_prevMsec;
int m_udpRefCount;
u_short m_width; // image data info
u_short m_height;
};
// When JPEG is stored as a file it is wrapped in a container
// This function fixes up the provided start ptr to point to the
// actual JPEG stream data and returns the number of bytes skipped
// returns true if the file seems to be valid jpeg
// If quant tables can be found they will be stored in qtable0/1
bool decodeJPEGfile(BufPtr *start, uint32_t *len, BufPtr *qtable0, BufPtr *qtable1);
bool findJPEGheader(BufPtr *start, uint32_t *len, uint8_t marker);
// Given a jpeg ptr pointing to a pair of length bytes, advance the pointer to
// the next 0xff marker byte
void nextJpegBlock(BufPtr *start);

File diff suppressed because it is too large


@ -1,11 +0,0 @@
#pragma once
#ifndef ARDUINO_ARCH_ESP32
#define INCLUDE_SIMDATA
#endif
#ifdef INCLUDE_SIMDATA
extern unsigned const char capture_jpg[];
extern unsigned const char octo_jpg[];
extern unsigned int octo_jpg_len, capture_jpg_len;
#endif


@ -1,43 +0,0 @@
#pragma once
#include "platglue.h"
#include <stdio.h>
class LinkedListElement
{
public:
LinkedListElement* m_Next;
LinkedListElement* m_Prev;
LinkedListElement(void)
{
m_Next = this;
m_Prev = this;
printf("LinkedListElement (%p)->(%p)->(%p)\n", m_Prev, this, m_Next);
}
int NotEmpty(void)
{
return (m_Next != this);
}
LinkedListElement(LinkedListElement* linkedList)
{
// add to the end of list
m_Prev = linkedList->m_Prev;
linkedList->m_Prev = this;
m_Prev->m_Next = this;
m_Next = linkedList;
printf("LinkedListElement (%p)->(%p)->(%p)\n", m_Prev, this, m_Next);
}
~LinkedListElement()
{
printf("~LinkedListElement(%p)->(%p)->(%p)\n", m_Prev, this, m_Next);
if (m_Next)
m_Next->m_Prev = m_Prev;
if (m_Prev)
m_Prev->m_Next = m_Next;
printf("~LinkedListElement after: (%p)->(%p)", m_Prev, m_Prev->m_Next);
}
};


@ -1,201 +0,0 @@
#include "OV2640.h"
#define TAG "OV2640"
// definitions appropriate for the ESP32-CAM devboard (and most clones)
camera_config_t esp32cam_config{
.pin_pwdn = -1, // FIXME: on the TTGO T-Journal I think this is GPIO 0
.pin_reset = 15,
.pin_xclk = 27,
.pin_sscb_sda = 25,
.pin_sscb_scl = 23,
.pin_d7 = 19,
.pin_d6 = 36,
.pin_d5 = 18,
.pin_d4 = 39,
.pin_d3 = 5,
.pin_d2 = 34,
.pin_d1 = 35,
.pin_d0 = 17,
.pin_vsync = 22,
.pin_href = 26,
.pin_pclk = 21,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,
.pixel_format = PIXFORMAT_JPEG,
// .frame_size = FRAMESIZE_UXGA, // needs 234K of framebuffer space
// .frame_size = FRAMESIZE_SXGA, // needs 160K for framebuffer
// .frame_size = FRAMESIZE_XGA, // needs 96K or even smaller FRAMESIZE_SVGA - can work if using only 1 fb
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 12, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with JPEG
};
camera_config_t esp32cam_aithinker_config{
.pin_pwdn = 32,
.pin_reset = -1,
.pin_xclk = 0,
.pin_sscb_sda = 26,
.pin_sscb_scl = 27,
// Note: LED GPIO is apparently 4 not sure where that goes
// per https://github.com/donny681/ESP32_CAMERA_QR/blob/e4ef44549876457cd841f33a0892c82a71f35358/main/led.c
.pin_d7 = 35,
.pin_d6 = 34,
.pin_d5 = 39,
.pin_d4 = 36,
.pin_d3 = 21,
.pin_d2 = 19,
.pin_d1 = 18,
.pin_d0 = 5,
.pin_vsync = 25,
.pin_href = 23,
.pin_pclk = 22,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_1,
.ledc_channel = LEDC_CHANNEL_1,
.pixel_format = PIXFORMAT_JPEG,
// .frame_size = FRAMESIZE_UXGA, // needs 234K of framebuffer space
// .frame_size = FRAMESIZE_SXGA, // needs 160K for framebuffer
// .frame_size = FRAMESIZE_XGA, // needs 96K or even smaller FRAMESIZE_SVGA - can work if using only 1 fb
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 12, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with JPEG
};
camera_config_t esp32cam_ttgo_t_config{
.pin_pwdn = 26,
.pin_reset = -1,
.pin_xclk = 32,
.pin_sscb_sda = 13,
.pin_sscb_scl = 12,
.pin_d7 = 39,
.pin_d6 = 36,
.pin_d5 = 23,
.pin_d4 = 18,
.pin_d3 = 15,
.pin_d2 = 4,
.pin_d1 = 14,
.pin_d0 = 5,
.pin_vsync = 27,
.pin_href = 25,
.pin_pclk = 19,
.xclk_freq_hz = 20000000,
.ledc_timer = LEDC_TIMER_0,
.ledc_channel = LEDC_CHANNEL_0,
.pixel_format = PIXFORMAT_JPEG,
.frame_size = FRAMESIZE_SVGA,
.jpeg_quality = 12, //0-63 lower numbers are higher quality
.fb_count = 2 // if more than one, i2s runs in continuous mode. Use only with JPEG
};
void OV2640::done(void)
{
if (fb) {
//return the frame buffer back to the driver for reuse
esp_camera_fb_return(fb);
fb = NULL;
}
}
void OV2640::run(void)
{
if (fb)
//return the frame buffer back to the driver for reuse
esp_camera_fb_return(fb);
fb = esp_camera_fb_get();
}
void OV2640::runIfNeeded(void)
{
if (!fb)
run();
}
int OV2640::getWidth(void)
{
runIfNeeded();
return fb->width;
}
int OV2640::getHeight(void)
{
runIfNeeded();
return fb->height;
}
size_t OV2640::getSize(void)
{
runIfNeeded();
if (!fb)
return 0; // FIXME - this shouldn't be possible but apparently the new cam board returns null sometimes?
return fb->len;
}
uint8_t *OV2640::getfb(void)
{
runIfNeeded();
if (!fb)
return NULL; // FIXME - this shouldn't be possible but apparently the new cam board returns null sometimes?
return fb->buf;
}
framesize_t OV2640::getFrameSize(void)
{
return _cam_config.frame_size;
}
void OV2640::setFrameSize(framesize_t size)
{
_cam_config.frame_size = size;
}
pixformat_t OV2640::getPixelFormat(void)
{
return _cam_config.pixel_format;
}
void OV2640::setPixelFormat(pixformat_t format)
{
switch (format)
{
case PIXFORMAT_RGB565:
case PIXFORMAT_YUV422:
case PIXFORMAT_GRAYSCALE:
case PIXFORMAT_JPEG:
_cam_config.pixel_format = format;
break;
default:
_cam_config.pixel_format = PIXFORMAT_GRAYSCALE;
break;
}
}
esp_err_t OV2640::init(camera_config_t config)
{
memset(&_cam_config, 0, sizeof(_cam_config));
memcpy(&_cam_config, &config, sizeof(config));
esp_err_t err = esp_camera_init(&_cam_config);
if (err != ESP_OK)
{
printf("Camera probe failed with error 0x%x", err);
return err;
}
// ESP_ERROR_CHECK(gpio_install_isr_service(0));
return ESP_OK;
}


@ -1,44 +0,0 @@
#ifndef OV2640_H_
#define OV2640_H_
#include <Arduino.h>
#include <pgmspace.h>
#include <stdio.h>
#include "esp_log.h"
#include "esp_attr.h"
#include "esp_camera.h"
extern camera_config_t esp32cam_config, esp32cam_aithinker_config, esp32cam_ttgo_t_config;
class OV2640
{
public:
OV2640(){
fb = NULL;
};
~OV2640(){
};
esp_err_t init(camera_config_t config);
void done(void);
void run(void);
size_t getSize(void);
uint8_t *getfb(void);
int getWidth(void);
int getHeight(void);
framesize_t getFrameSize(void);
pixformat_t getPixelFormat(void);
void setFrameSize(framesize_t size);
void setPixelFormat(pixformat_t format);
private:
void runIfNeeded(); // grab a frame if we don't already have one
// camera_framesize_t _frame_size;
// camera_pixelformat_t _pixel_format;
camera_config_t _cam_config;
camera_fb_t *fb;
};
#endif //OV2640_H_


@ -1,19 +0,0 @@
#include "OV2640Streamer.h"
#include <assert.h>
OV2640Streamer::OV2640Streamer(OV2640 &cam) : CStreamer(cam.getWidth(), cam.getHeight()), m_cam(cam)
{
printf("Created streamer width=%d, height=%d\n", cam.getWidth(), cam.getHeight());
}
void OV2640Streamer::streamImage(uint32_t curMsec)
{
m_cam.run();// queue up a read for next time
BufPtr bytes = m_cam.getfb();
streamFrame(bytes, m_cam.getSize(), curMsec);
m_cam.done();
}


@ -1,15 +0,0 @@
#pragma once
#include "CStreamer.h"
#include "OV2640.h"
class OV2640Streamer : public CStreamer
{
bool m_showBig;
OV2640 &m_cam;
public:
OV2640Streamer(OV2640 &cam);
virtual void streamImage(uint32_t curMsec);
};


@ -1,28 +0,0 @@
#include "SimStreamer.h"
#include "JPEGSamples.h"
#ifdef INCLUDE_SIMDATA
SimStreamer::SimStreamer(bool showBig) : CStreamer(showBig ? 800 : 640, showBig ? 600 : 480)
{
m_showBig = showBig;
}
void SimStreamer::streamImage(uint32_t curMsec)
{
if(m_showBig) {
BufPtr bytes = capture_jpg;
uint32_t len = capture_jpg_len;
streamFrame(bytes, len, curMsec);
}
else {
BufPtr bytes = octo_jpg;
uint32_t len = octo_jpg_len;
streamFrame(bytes, len, curMsec);
}
}
#endif


@ -1,15 +0,0 @@
#pragma once
#include "JPEGSamples.h"
#include "CStreamer.h"
#ifdef INCLUDE_SIMDATA
class SimStreamer : public CStreamer
{
bool m_showBig;
public:
SimStreamer(bool showBig);
virtual void streamImage(uint32_t curMsec);
};
#endif


@ -1,107 +0,0 @@
#pragma once
#include <Arduino.h>
#include <WiFiClient.h>
#include <WiFiUdp.h>
#include <sys/socket.h>
#include <netinet/in.h>
//#include <arpa/inet.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>
typedef WiFiClient *SOCKET;
typedef WiFiUDP *UDPSOCKET;
typedef IPAddress IPADDRESS; // On linux use uint32_t in network byte order (per getpeername)
typedef uint16_t IPPORT; // on linux use network byte order
#define NULLSOCKET NULL
inline void closesocket(SOCKET s) {
printf("closing TCP socket\n");
if(s) {
s->stop();
// delete s; TCP WiFiClients are never on the heap in Arduino land?
}
}
#define getRandom() random(65536)
inline void socketpeeraddr(SOCKET s, IPADDRESS *addr, IPPORT *port) {
*addr = s->remoteIP();
*port = s->remotePort();
}
inline void udpsocketclose(UDPSOCKET s) {
printf("closing UDP socket\n");
if(s) {
s->stop();
delete s;
}
}
inline UDPSOCKET udpsocketcreate(unsigned short portNum)
{
UDPSOCKET s = new WiFiUDP();
if(!s->begin(portNum)) {
printf("Can't bind port %d\n", portNum);
delete s;
return NULL;
}
return s;
}
// TCP sending
inline ssize_t socketsend(SOCKET sockfd, const void *buf, size_t len)
{
return sockfd->write((uint8_t *) buf, len);
}
inline ssize_t udpsocketsend(UDPSOCKET sockfd, const void *buf, size_t len,
IPADDRESS destaddr, IPPORT destport)
{
sockfd->beginPacket(destaddr, destport);
sockfd->write((const uint8_t *) buf, len);
if(!sockfd->endPacket())
printf("error sending udp packet\n");
return len;
}
/**
Read from a socket with a timeout.
Return 0=socket was closed by client, -1=timeout, >0 number of bytes read
*/
inline int socketread(SOCKET sock, char *buf, size_t buflen, int timeoutmsec)
{
if(!sock->connected()) {
printf("client has closed the socket\n");
return 0;
}
int numAvail = sock->available();
if(numAvail == 0 && timeoutmsec != 0) {
// sleep and hope for more
delay(timeoutmsec);
numAvail = sock->available();
}
if(numAvail == 0) {
// printf("timeout on read\n");
return -1;
}
else {
// int numRead = sock->readBytesUntil('\n', buf, buflen);
int numRead = sock->readBytes(buf, buflen);
// printf("bytes avail %d, read %d: %s", numAvail, numRead, buf);
return numRead;
}
}


@ -1,94 +0,0 @@
/**
* @author Marco Garzola
*/
#pragma once
#include "mbed.h"
typedef TCPSocket* SOCKET;
typedef UDPSocket* UDPSOCKET;
typedef SocketAddress IPADDRESS;
typedef uint16_t IPPORT;
#define SEND_TMEOUT_MS 1000
#define NULLSOCKET NULL
inline void closesocket(SOCKET s)
{
if (s)
{
s->close();
}
}
#define getRandom() rand()
inline void socketpeeraddr(SOCKET s, IPADDRESS* addr, IPPORT* port)
{
s->getpeername(addr);
*port = addr->get_port();
}
inline UDPSOCKET udpsocketcreate(unsigned short portNum)
{
UDPSOCKET s = new UDPSocket();
if (s->open(NetworkInterface::get_default_instance()) != 0 && s->bind(portNum) != 0)
{
printf("Can't bind port %d\n", portNum);
delete s;
return nullptr;
}
return s;
}
inline void udpsocketclose(UDPSOCKET s)
{
if (s)
{
s->close();
delete s;
}
}
inline ssize_t
udpsocketsend(UDPSOCKET sockfd, const void* buf, size_t len, IPADDRESS destaddr, uint16_t destport)
{
if (sockfd)
{
return sockfd->sendto(destaddr.get_ip_address(), destport, buf, len);
}
else
{
return 0;
}
}
// TCP sending
inline ssize_t socketsend(SOCKET sockfd, const void* buf, size_t len)
{
if (sockfd && buf)
{
sockfd->set_blocking(true);
sockfd->set_timeout(SEND_TMEOUT_MS);
return sockfd->send(buf, len);
}
else
{
return 0;
}
}
inline int socketread(SOCKET sock, char* buf, size_t buflen, int timeoutmsec)
{
if (sock && buf)
{
sock->set_blocking(true);
sock->set_timeout(timeoutmsec);
return sock->recv(buf, buflen);
}
else
{
return -1;
}
}


@ -1,111 +0,0 @@
#pragma once
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <errno.h>
typedef int SOCKET;
typedef int UDPSOCKET;
typedef uint32_t IPADDRESS; // On linux use uint32_t in network byte order (per getpeername)
typedef uint16_t IPPORT; // on linux use network byte order
#define NULLSOCKET 0
inline void closesocket(SOCKET s) {
close(s);
}
#define getRandom() rand()
inline void socketpeeraddr(SOCKET s, IPADDRESS *addr, IPPORT *port) {
sockaddr_in r;
socklen_t len = sizeof(r);
if(getpeername(s,(struct sockaddr*)&r,&len) < 0) {
printf("getpeername failed\n");
*addr = 0;
*port = 0;
}
else {
//htons
*port = r.sin_port;
*addr = r.sin_addr.s_addr;
}
}
inline void udpsocketclose(UDPSOCKET s) {
close(s);
}
inline UDPSOCKET udpsocketcreate(unsigned short portNum)
{
sockaddr_in addr;
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = INADDR_ANY;
int s = socket(AF_INET, SOCK_DGRAM, 0);
addr.sin_port = htons(portNum);
if (bind(s,(sockaddr*)&addr,sizeof(addr)) != 0) {
printf("Error, can't bind\n");
close(s);
s = 0;
}
return s;
}
// TCP sending
inline ssize_t socketsend(SOCKET sockfd, const void *buf, size_t len)
{
// printf("TCP send\n");
return send(sockfd, buf, len, 0);
}
inline ssize_t udpsocketsend(UDPSOCKET sockfd, const void *buf, size_t len,
IPADDRESS destaddr, uint16_t destport)
{
sockaddr_in addr;
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = destaddr;
addr.sin_port = htons(destport);
//printf("UDP send to 0x%0x:%0x\n", destaddr, destport);
return sendto(sockfd, buf, len, 0, (sockaddr *) &addr, sizeof(addr));
}
/**
Read from a socket with a timeout.
Return 0=socket was closed by client, -1=timeout, >0 number of bytes read
*/
inline int socketread(SOCKET sock, char *buf, size_t buflen, int timeoutmsec)
{
// Use a timeout on our socket read to instead serve frames
struct timeval tv;
tv.tv_sec = 0;
tv.tv_usec = timeoutmsec * 1000; // convert the timeout from ms to microseconds
setsockopt(sock, SOL_SOCKET, SO_RCVTIMEO, &tv, sizeof tv);
int res = recv(sock,buf,buflen,0);
if(res > 0) {
return res;
}
else if(res == 0) {
return 0; // client dropped connection
}
else {
if (errno == EWOULDBLOCK || errno == EAGAIN)
return -1;
else
return 0; // unknown error, just claim client dropped it
};
}


@ -1,7 +0,0 @@
#pragma once
#ifdef ARDUINO_ARCH_ESP32
#include "platglue-esp32.h"
#else
#include "platglue-posix.h"
#endif


@ -1,7 +0,0 @@
SRCS = ../src/CRtspSession.cpp ../src/CStreamer.cpp ../src/JPEGSamples.cpp ../src/SimStreamer.cpp
run: *.cpp ../src/*
skill testserver
g++ -o testserver -I ../src -I . *.cpp $(SRCS)
./testserver


@ -1,15 +0,0 @@
# Testserver
This is a standalone Linux test application to allow development of this
library without going through the slow process of always testing on the ESP32.
Almost all of the code is the same - only platglue-posix.h differs from
platglue-esp32.h (thus serving as a crude HAL).
RTSPTestServer.cpp also serves as a small example of how this library could
be used on POSIX systems.
# Usage
Run "make" to build and run the server. Run "runvlc.sh" to fire up a VLC client
that talks to that server. If all is working you should see a static image
of my office that I captured using an ESP32-CAM.


@ -1,69 +0,0 @@
#include "platglue.h"
#include "SimStreamer.h"
#include "CRtspSession.h"
#include "JPEGSamples.h"
#include <assert.h>
#include <sys/time.h>
void workerThread(SOCKET s)
{
SimStreamer streamer(s, true); // our streamer for UDP/TCP based RTP transport
CRtspSession rtsp(s, &streamer); // our threads RTSP session and state
while (!rtsp.m_stopped)
{
uint32_t timeout = 400;
if(!rtsp.handleRequests(timeout)) {
struct timeval now;
gettimeofday(&now, NULL); // crufty msecish timer
uint32_t msec = now.tv_sec * 1000 + now.tv_usec / 1000;
rtsp.broadcastCurrentFrame(msec);
}
}
}
int main()
{
SOCKET MasterSocket; // our masterSocket(socket that listens for RTSP client connections)
SOCKET ClientSocket;                                   // RTSP socket to handle a client
sockaddr_in ServerAddr; // server address parameters
sockaddr_in ClientAddr; // address parameters of a new RTSP client
socklen_t ClientAddrLen = sizeof(ClientAddr);
printf("running RTSP server\n");
ServerAddr.sin_family = AF_INET;
ServerAddr.sin_addr.s_addr = INADDR_ANY;
ServerAddr.sin_port = htons(8554); // listen on RTSP port 8554
MasterSocket = socket(AF_INET,SOCK_STREAM,0);
int enable = 1;
if (setsockopt(MasterSocket, SOL_SOCKET, SO_REUSEADDR, &enable, sizeof(int)) < 0) {
printf("setsockopt(SO_REUSEADDR) failed");
return 0;
}
// bind our master socket to the RTSP port and listen for a client connection
if (bind(MasterSocket,(sockaddr*)&ServerAddr,sizeof(ServerAddr)) != 0) {
printf("error can't bind port errno=%d\n", errno);
return 0;
}
if (listen(MasterSocket,5) != 0) return 0;
while (true)
{ // loop forever to accept client connections
ClientSocket = accept(MasterSocket,(struct sockaddr*)&ClientAddr,&ClientAddrLen);
printf("Client connected. Client address: %s\r\n",inet_ntoa(ClientAddr.sin_addr));
        if(fork() == 0) {          // child: serve this client, then exit
            workerThread(ClientSocket);
            return 0;              // don't fall back into the accept loop
        }
        closesocket(ClientSocket); // parent keeps only the listening socket
}
closesocket(MasterSocket);
return 0;
}

View File

@ -1,2 +0,0 @@
# for testing
vlc -v rtsp://192.168.86.215:8554/mjpeg/1

View File

@ -1,111 +0,0 @@
#include "platglue.h"
#include "SimStreamer.h"
#include "CRtspSession.h"
#include "JPEGSamples.h"
// From RFC 2435: generate the standard quantization tables
/*
* Table K.1 from JPEG spec.
*/
static const int jpeg_luma_quantizer[64] = {
16, 11, 10, 16, 24, 40, 51, 61,
12, 12, 14, 19, 26, 58, 60, 55,
14, 13, 16, 24, 40, 57, 69, 56,
14, 17, 22, 29, 51, 87, 80, 62,
18, 22, 37, 56, 68, 109, 103, 77,
24, 35, 55, 64, 81, 104, 113, 92,
49, 64, 78, 87, 103, 121, 120, 101,
72, 92, 95, 98, 112, 100, 103, 99
};
/*
* Table K.2 from JPEG spec.
*/
static const int jpeg_chroma_quantizer[64] = {
17, 18, 24, 47, 99, 99, 99, 99,
18, 21, 26, 66, 99, 99, 99, 99,
24, 26, 56, 99, 99, 99, 99, 99,
47, 66, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99,
99, 99, 99, 99, 99, 99, 99, 99
};
/*
* Call MakeTables with the Q factor and two u_char[64] return arrays
*/
void
MakeTables(int q, u_char *lqt, u_char *cqt)
{
int i;
int factor = q;
if (q < 1) factor = 1;
if (q > 99) factor = 99;
if (q < 50)
q = 5000 / factor;
else
q = 200 - factor*2;
for (i=0; i < 64; i++) {
int lq = (jpeg_luma_quantizer[i] * q + 50) / 100;
int cq = (jpeg_chroma_quantizer[i] * q + 50) / 100;
/* Limit the quantizers to 1 <= q <= 255 */
if (lq < 1) lq = 1;
else if (lq > 255) lq = 255;
lqt[i] = lq;
if (cq < 1) cq = 1;
else if (cq > 255) cq = 255;
cqt[i] = cq;
}
}
// Analyze an image from our camera to find which quant table it is using...
// Used to see if our camera is spitting out standard RTP tables (it isn't),
// so we have to use a Q of 255 to indicate that each frame has unique quant tables:
// use 0 for precision in the quant header, 64 for length.
void findCameraQuant()
{
BufPtr bytes = capture_jpg;
uint32_t len = capture_jpg_len;
if(!findJPEGheader(&bytes, &len, 0xdb)) {
printf("error can't find quant table 0\n");
return;
}
else {
printf("found quant table %x (len %d)\n", bytes[2], bytes[1]);
}
BufPtr qtable0 = bytes + 3; // 3 bytes of header skipped
nextJpegBlock(&bytes);
if(!findJPEGheader(&bytes, &len, 0xdb)) {
printf("error can't find quant table 1\n");
return;
}
else {
printf("found quant table %x\n", bytes[2]);
}
BufPtr qtable1 = bytes + 3;
nextJpegBlock(&bytes);
    bool found = false;
    for(int q = 0; q < 128; q++) {
        uint8_t lqt[64], cqt[64];
        MakeTables(q, lqt, cqt);
        if(memcmp(qtable0, lqt, sizeof(lqt)) == 0 && memcmp(qtable1, cqt, sizeof(cqt)) == 0) {
            printf("Found matching quant table %d\n", q);
            found = true;
        }
    }
    if(!found)
        printf("No matching quant table found!\n");
}

View File

@ -1,2 +0,0 @@
# for testing
vlc -v rtsp://127.0.0.1:8554/mjpeg/1

View File

@ -1,224 +0,0 @@
#include <Arduino.h>
#include "OV2640.h"
#include <WiFi.h>
#include <WebServer.h>
#include <WiFiClient.h>
#include "SimStreamer.h"
#include "OV2640Streamer.h"
#include "CRtspSession.h"
// #define ENABLE_OLED // if you want to use the OLED, turn this macro on
// #define SOFTAP_MODE // if you want to run your own softAP, turn this on
#define ENABLE_WEBSERVER
#define ENABLE_RTSPSERVER
#ifdef ENABLE_OLED
#include "SSD1306.h"
#define OLED_ADDRESS 0x3c
#define I2C_SDA 14
#define I2C_SCL 13
SSD1306Wire display(OLED_ADDRESS, I2C_SDA, I2C_SCL, GEOMETRY_128_32);
bool hasDisplay; // we probe for the device at runtime
#endif
OV2640 cam;
#ifdef ENABLE_WEBSERVER
WebServer server(80);
#endif
#ifdef ENABLE_RTSPSERVER
WiFiServer rtspServer(8554);
#endif
#ifdef SOFTAP_MODE
IPAddress apIP = IPAddress(192, 168, 1, 1);
#else
#include "wifikeys.h"
#endif
#ifdef ENABLE_WEBSERVER
void handle_jpg_stream(void)
{
WiFiClient client = server.client();
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-Type: multipart/x-mixed-replace; boundary=frame\r\n\r\n";
server.sendContent(response);
while (1)
{
cam.run();
if (!client.connected())
break;
response = "--frame\r\n";
response += "Content-Type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
server.sendContent("\r\n");
if (!client.connected())
break;
}
}
void handle_jpg(void)
{
WiFiClient client = server.client();
cam.run();
if (!client.connected())
{
return;
}
String response = "HTTP/1.1 200 OK\r\n";
response += "Content-disposition: inline; filename=capture.jpg\r\n";
response += "Content-type: image/jpeg\r\n\r\n";
server.sendContent(response);
client.write((char *)cam.getfb(), cam.getSize());
}
void handleNotFound()
{
String message = "Server is running!\n\n";
message += "URI: ";
message += server.uri();
message += "\nMethod: ";
message += (server.method() == HTTP_GET) ? "GET" : "POST";
message += "\nArguments: ";
message += server.args();
message += "\n";
server.send(200, "text/plain", message);
}
#endif
#ifdef ENABLE_OLED
#define LCD_MESSAGE(msg) lcdMessage(msg)
#else
#define LCD_MESSAGE(msg)
#endif
#ifdef ENABLE_OLED
void lcdMessage(String msg)
{
if(hasDisplay) {
display.clear();
display.drawString(128 / 2, 32 / 2, msg);
display.display();
}
}
#endif
CStreamer *streamer;
void setup()
{
#ifdef ENABLE_OLED
hasDisplay = display.init();
if(hasDisplay) {
display.flipScreenVertically();
display.setFont(ArialMT_Plain_16);
display.setTextAlignment(TEXT_ALIGN_CENTER);
}
#endif
LCD_MESSAGE("booting");
Serial.begin(115200);
while (!Serial)
{
;
}
cam.init(esp32cam_config);
IPAddress ip;
#ifdef SOFTAP_MODE
const char *hostname = "devcam";
// WiFi.hostname(hostname); // FIXME - find out why undefined
LCD_MESSAGE("starting softAP");
WiFi.mode(WIFI_AP);
WiFi.softAPConfig(apIP, apIP, IPAddress(255, 255, 255, 0));
bool result = WiFi.softAP(hostname, "12345678", 1, 0);
if (!result)
{
Serial.println("AP Config failed.");
return;
}
else
{
Serial.println("AP Config Success.");
Serial.print("AP MAC: ");
Serial.println(WiFi.softAPmacAddress());
ip = WiFi.softAPIP();
}
#else
LCD_MESSAGE(String("join ") + ssid);
WiFi.mode(WIFI_STA);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED)
{
delay(500);
Serial.print(F("."));
}
ip = WiFi.localIP();
Serial.println(F("WiFi connected"));
Serial.println("");
Serial.println(ip);
#endif
LCD_MESSAGE(ip.toString());
#ifdef ENABLE_WEBSERVER
server.on("/", HTTP_GET, handle_jpg_stream);
server.on("/jpg", HTTP_GET, handle_jpg);
server.onNotFound(handleNotFound);
server.begin();
#endif
#ifdef ENABLE_RTSPSERVER
rtspServer.begin();
//streamer = new SimStreamer(true); // our streamer for UDP/TCP based RTP transport
streamer = new OV2640Streamer(cam); // our streamer for UDP/TCP based RTP transport
#endif
}
void loop()
{
#ifdef ENABLE_WEBSERVER
server.handleClient();
#endif
#ifdef ENABLE_RTSPSERVER
uint32_t msecPerFrame = 100;
static uint32_t lastimage = millis();
// If we have an active client connection, just service that until gone
streamer->handleRequests(0); // we don't use a timeout here,
// instead we send only if we have new enough frames
uint32_t now = millis();
if(streamer->anySessions()) {
if(now > lastimage + msecPerFrame || now < lastimage) { // handle clock rollover
streamer->streamImage(now);
lastimage = now;
// check if we are overrunning our max frame rate
now = millis();
if(now > lastimage + msecPerFrame) {
printf("warning exceeding max frame rate of %d ms\n", now - lastimage);
}
}
}
WiFiClient rtspClient = rtspServer.accept();
if(rtspClient) {
Serial.print("client: ");
Serial.print(rtspClient.remoteIP());
Serial.println();
streamer->addSession(rtspClient);
}
#endif
}

View File

@ -1,3 +0,0 @@
// copy this file to wifikeys.h and edit
const char *ssid = "S o S"; // Put your SSID here
const char *password = "SandraArSnygg"; // Put your PASSWORD here

0
water_meter/__init__.py Executable file
View File

BIN
water_meter/capture.png Executable file

Binary file not shown. (added, 357 KiB)

BIN
water_meter/capture_3.png Executable file

Binary file not shown. (added, 370 KiB)

BIN
water_meter/capture_4.png Executable file

Binary file not shown. (added, 368 KiB)

View File

@ -7,7 +7,7 @@ def nothing(x):
# Load image
image = cv2.imread('capture_1.jpg')
image = cv2.imread('capture.png')
# Create a window
cv2.namedWindow('image')

View File

@ -5,7 +5,7 @@ def nothing(x):
pass
# Load image
image = cv2.imread('capture.jpg')
image = cv2.imread('capture.png')
# Create a window
cv2.namedWindow('image')

16
water_meter/read_stram.py Executable file
View File

@ -0,0 +1,16 @@
import cv2
def get_video():
camera = cv2.VideoCapture()
camera.open('http://10.0.0.22:81')
while True:
        okay, frame = camera.read()
        if not okay:
            continue  # skip frames that failed to arrive or decode
        # cv2.imwrite('capture_4.png', frame)
        # break
        cv2.imshow('video', frame)
        cv2.waitKey(1)
if __name__ == '__main__':
get_video()

0
water_meter/src/__init__.py Executable file
View File

3
water_meter/src/config.yml Executable file
View File

@ -0,0 +1,3 @@
mqtt:
broker: 10.0.0.3
port: 1883
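Read back with the get_config helper defined in src/helpers.py just below, this file parses to a plain dict. A small illustrative sketch (assuming the same working directory that main.py uses):

from src.helpers import get_config

config = get_config("./config.yml")
print(config)   # -> {'mqtt': {'broker': '10.0.0.3', 'port': 1883}}
print(config["mqtt"]["broker"], config["mqtt"]["port"])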

27
water_meter/src/helpers.py Executable file
View File

@ -0,0 +1,27 @@
import yaml
import logging
def get_config(config_filepath: str) -> dict:
with open(config_filepath) as f:
config = yaml.safe_load(f)
return config
def create_logger():
    import multiprocessing
    import os
    logger = multiprocessing.get_logger()
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '[%(asctime)s| %(levelname)s| %(message)s')
    if not os.path.exists("logs/"):
        os.makedirs("logs/")
    handler = logging.FileHandler('logs/water.log')
    handler.setFormatter(formatter)
    # this bit will make sure you won't have
    # duplicated messages in the output
    if not logger.handlers:
        logger.addHandler(handler)
    return logger

59
water_meter/src/main.py Executable file
View File

@ -0,0 +1,59 @@
import os
import time
from threading import Thread
from src.helpers import get_config, create_logger
from src.mqtt import get_mqtt_client
from src.water import ReportAmount, Water
from src.water_meter import WaterMeter
from queue import Queue
CONFIG_FILE_PATH = os.getenv("MQTT_CAMERA_CONFIG", "./config.yml")
CONFIG = get_config(CONFIG_FILE_PATH)
MQTT_BROKER = CONFIG["mqtt"]["broker"]
MQTT_PORT = CONFIG["mqtt"]["port"]
def producer():
logger = create_logger()
logger.info("From producer")
water_meter = WaterMeter('http://10.0.0.22:81', img='capture_4.png', debug=False)
water_meter.loop(q)
def consumer():
logger = create_logger()
logger.info("From consumer")
evaluator = ReportAmount(client, topic='water_meter/litre')
while True:
water = q.get(True)
evaluator.send(water)
def main():
client = get_mqtt_client()
client.connect(MQTT_BROKER, port=MQTT_PORT)
time.sleep(5)
while True:
from random import randrange
value = randrange(10)
        client.publish('water_meter/litre', value)  # publish the generated test value
time.sleep(3)
def main2():
    # start the consumer first; producer() blocks in the capture loop
    consumer_thread = Thread(target=consumer)
    consumer_thread.daemon = True
    consumer_thread.start()
    producer()
if __name__ == '__main__':
logger = create_logger()
logger.info("From main")
q = Queue()
client = get_mqtt_client()
client.connect(MQTT_BROKER, port=MQTT_PORT)
main2()

33
water_meter/src/mqtt.py Executable file
View File

@ -0,0 +1,33 @@
"""
Some boilerplate code to handle MQTT.
"""
import os
from paho.mqtt import client as mqtt
from src.helpers import get_config
CONFIG_FILE_PATH = os.getenv("MQTT_CAMERA_CONFIG", "./config.yml")
CONFIG = get_config(CONFIG_FILE_PATH)
# Required callbacks
def on_connect(client, userdata, flags, rc):
# print(f"CONNACK received with code {rc}")
if rc == 0:
print("connected to MQTT broker")
client.connected_flag = True # set flag
else:
print("Bad connection to MQTT broker, returned code=", rc)
def on_publish(client, userdata, mid):
print("mid: " + str(mid))
def get_mqtt_client():
"""Return the MQTT client object."""
client = mqtt.Client()
client.connected_flag = False # set flag
client.on_connect = on_connect
client.on_publish = on_publish
return client
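A minimal usage sketch for this module (illustrative, not part of the file above): the broker address and port come from config.yml, and loop_start() is what services the connection so that on_connect can fire and flip connected_flag.

import time
from src.helpers import get_config
from src.mqtt import get_mqtt_client

config = get_config("./config.yml")
client = get_mqtt_client()
client.connect(config["mqtt"]["broker"], port=config["mqtt"]["port"])
client.loop_start()                      # background network loop -> on_connect runs
while not client.connected_flag:         # wait for the CONNACK
    time.sleep(0.1)
client.publish("water_meter/litre", 1)   # same topic main.py reports litres on
client.loop_stop()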

100
water_meter/src/water.py Executable file
View File

@ -0,0 +1,100 @@
from dataclasses import dataclass, field
import time
from threading import Thread
from queue import Queue
from .helpers import create_logger
@dataclass
class Water:
percent_of_a_litre: int
    timestamp: float = field(default_factory=time.time)  # evaluated per instance, not once at class definition
def coroutine(fn):
def wrapper(*args, **kwargs):
v = fn(*args, **kwargs)
v.send(None)
return v
return wrapper
class ReportAmount:
def __init__(self, client, topic):
        self.report = self._report()
        self.redo = self._redo()
        self.reported = self._reported()
        self.current_state = self.redo  # start armed, waiting for the needle to drop below 20%
self.last_send_timestamp = 0
self.litre = 0
self.client = client
self.topic = topic
self.logger = create_logger()
def send(self, water):
self.current_state.send(water)
@coroutine
def _report(self):
while True:
water = yield
percentage = water.percent_of_a_litre
if percentage > 0:
self.litre += 1
self.client.publish(self.topic, 1)
self.logger.info(f"New litre, publish, amount: {self.litre}")
self.last_send_timestamp = water.timestamp
self.current_state = self.reported
#print(f"REPORT: Percentage is {percentage}, time {water.timestamp}")
@coroutine
def _reported(self):
while True:
water = yield
percentage = water.percent_of_a_litre
time_diff = water.timestamp - self.last_send_timestamp
if percentage > 25:
self.current_state = self.redo
# print(f"REPORTED: Wait until percentage is 50, {percentage}"
# f", timediff: {time_diff}")
@coroutine
def _redo(self):
while True:
water = yield
percentage = water.percent_of_a_litre
if percentage < 20:
print("Change to report")
self.current_state = self.report
#print(f"REDO: Percentage is {percentage}, time {water.timestamp}")
def producer():
for i in range(103):
q.put(Water(i % 100, time.time()))
time.sleep(0.1)
def consumer():
    class StubClient:  # stand-in MQTT client so this demo can run without a broker
        def publish(self, topic, payload):
            print(f"publish {topic} -> {payload}")
    evaluator = ReportAmount(StubClient(), topic='water_meter/litre')
    while True:
        water = q.get(True)
        evaluator.send(water)
        q.task_done()  # lets q.join() in __main__ return once every sample is handled
if __name__ == '__main__':
q = Queue()
t = Thread(target=consumer)
t.daemon = True
t.start()
t1 = Thread(target=producer)
t1.daemon = True
t1.start()
q.join()
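The ReportAmount coroutines form a small hysteresis cycle: redo arms the counter once the needle drops below 20%, report registers one litre (and publishes) on the next non-zero reading, and reported waits for the needle to pass 25% before re-arming, so each revolution of the dial is counted exactly once. A short trace of that cycle, using a made-up client and sample percentages (both illustrative, not part of the code above):

import time
from types import SimpleNamespace
from src.water import ReportAmount, Water

published = []
fake_client = SimpleNamespace(publish=lambda topic, payload: published.append((topic, payload)))
reporter = ReportAmount(fake_client, topic='water_meter/litre')
for pct in [10, 30, 60, 30, 30]:            # one sweep of the needle past zero
    reporter.send(Water(pct, time.time()))
print(reporter.litre, published)            # -> 1 [('water_meter/litre', 1)]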

View File

@ -7,16 +7,22 @@ import os
import numpy as np
import imutils
from scipy.spatial import distance as dist
from .water import Water
from .helpers import create_logger
class WaterMeter(object):
def __init__(self, url, img=None, debug=False):
self.url = url
self.img = img
self.debug = debug
self.cap = None
self._quit = False
self.percent_list = [0] * 10 # Init filling for moving avg
self.percent = 0
self.logger = create_logger()
def _init_camera(self):
self.cap = cv2.VideoCapture()
@ -37,7 +43,7 @@ class WaterMeter(object):
img = cv2.imread(self.img)
self.get_degree(img)
def loop(self):
def loop(self, queue):
bad_frame = 0
while not self._quit:
if self._quit:
@ -63,13 +69,24 @@ class WaterMeter(object):
bad_frame = 0
degree, img = self.get_degree(current_frame)
if degree < 0:
percent = (90 + abs(degree)) / 360
elif degree < 90 and degree != -1:
percent = (90 - degree) / 360
if degree < 0 and degree != -1:
degree = 360 - abs(degree)
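                    # e.g. degree = -90 becomes 360 - 90 = 270, i.e. 270/360 = 0.75 of a revolution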
if degree == -1 and self.debug:
print(f"Degree -1")
self.logger.debug(f"Degree -1")
else:
percent = (450 - degree) / 360
print(f"Degree is {degree} and percent is {percent * 100}")
percent = degree / 360
self.moving_average(percent)
# print(f"Degree <0, {degree} percent = {self.percent}")
self.logger.debug(f"Degree <0, {degree} percent = {self.percent}")
queue.put(Water(self.percent, time.time()))
fps = self.cap.get(cv2.CAP_PROP_FPS)
cv2.putText(current_frame, f"FPS: {fps}", (10, 30),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
cv2.putText(current_frame, f"%: {self.percent}", (10, 60),
cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)
cv2.imshow("Degree", current_frame)
cv2.waitKey(1)
@ -102,17 +119,27 @@ class WaterMeter(object):
# Find needle in dial
needle = self.find_red_needle(cut)
if needle is None:
return -1, img
extreme_points = self.get_contours(needle, cut)
if not extreme_points:
return -1, img
deg, point = self.find_angle(extreme_points, cut)
cv2.circle(img, (ulx, uly), radius, (0, 255, 0), 4)
cv2.line(img,
((ulx - radius) + point['x'], uly - radius + point['y']),
(ulx, uly), (0, 255, 255), 2)
cv2.rectangle(img, (ulx - 5, uly - 5), (ulx + 5, uly + 5),
(0, 128, 255),
-1)
if self.debug:
cv2.imshow('image', img)
cv2.waitKey(0)
return deg, img
def find_circle(self, img):
@ -142,7 +169,10 @@ class WaterMeter(object):
return None
def find_red_needle(self, img):
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
try:
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
except cv2.error:
return None
# Find suitable hsv number by running detect_hsv.py
lower_red_hue = self.create_hue_mask(hsv, [0, 100, 100], [10, 255, 255])
higher_red_hue = self.create_hue_mask(hsv, [170, 80, 110], [179, 255, 255])
@ -162,10 +192,12 @@ class WaterMeter(object):
try:
c = max(cnts, key=cv2.contourArea)
except ValueError:
print("Not finding any needle")
timestr = time.strftime("%Y%m%d-%H%M%S")
cv2.imwrite('error_neddle' + timestr + '.png', img)
if self.debug:
print("Not finding any needle")
timestr = time.strftime("%Y%m%d-%H%M%S")
cv2.imwrite('error_neddle' + timestr + '.png', img)
return None
# determine the most extreme points along the contour
ext_left = tuple(c[c[:, :, 0].argmin()][0])
ext_right = tuple(c[c[:, :, 0].argmax()][0])
@ -212,11 +244,20 @@ class WaterMeter(object):
cv2.imshow('length', cut)
cv2.waitKey(0)
rad = math.atan2(length_from_centre['y'], length_from_centre['x'])
deg = math.degrees(rad)
xlen = length_from_centre['x'] - int(height / 2)
ylen = length_from_centre['y'] - int(width / 2)
rad = math.atan2(ylen, xlen)
deg = math.degrees(rad)
if self.debug:
print(f"rad {rad}, deg {deg}")
return deg, length_from_centre
def moving_average(self, percent):
self.percent_list.pop()
self.percent_list.insert(0, percent)
self.percent = sum(self.percent_list) / len(self.percent_list)
self.percent = round(self.percent * 100)
if __name__ == '__main__':
water_meter = WaterMeter('http://10.0.0.22:81', img='capture_4.png', debug=False)