initial commit

This commit is contained in:
Fam Zheng 2024-09-01 21:51:50 +01:00
commit 8a4f10aeed
968 changed files with 106535 additions and 0 deletions

2
.dockerignore Normal file
View File

@ -0,0 +1,2 @@
./alg/opencv
.git

7
.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
build
/venv
/api/api/static/
/opencv
/emtest/target
/dataset/local
/detection/model

102
.gitlab-ci.yml Normal file
View File

@ -0,0 +1,102 @@
stages:
- test-and-build
- build-docker
- deploy
cache:
key: one-key-to-rule-them-all
paths:
- opencv/src
- opencv/contrib
- emtest/target
- venv
test:
stage: test-and-build
tags:
- i7
before_script:
- if ! test -d venv; then python3 -m venv venv; fi
- source venv/bin/activate
- pip3 install -r requirements.txt
script:
- make opencv
- make -C alg qrtool
- make test
build-alg:
stage: test-and-build
tags:
- i7
script:
- make opencv
- make build/alg/qrtool
artifacts:
paths:
- build
- alg/qrtool
build-web:
stage: test-and-build
tags:
- i7
before_script:
- (cd web; npm install)
script:
- make web
artifacts:
paths:
- build
build-docker:
  stage: build-docker
  tags:
    - i7
  script:
    - make docker-build
    - make docker-push
  dependencies:
    - build-web
    - build-alg
  # NOTE(review): `except: main` skips the image build on main, yet deploy-prod
  # (only: main) runs `docker-push-prod`, which retags $(IMAGE) for the main
  # commit — confirm the main-commit image actually exists in the registry
  # (e.g. pushed from the merged branch pipeline) before deploying.
  except:
    - main
deploy-dev:
stage: deploy
tags:
- i7
only:
- dev
script:
- make deploy-api-dev
cache: []
dev-smoke:
stage: test-and-build
tags:
- i7
allow_failure: true
script:
- ./scripts/emcli --env dev activate 0074253255108
- ./api/scripts/api_smoke.py -p $EMBLEM_CI_PASSWORD
cache: []
deploy-prod:
stage: deploy
tags:
- i7
only:
- main
script:
- make docker-push-prod
- make deploy-api-prod
cache: []
deploy-roi-worker:
tags:
- emblem-s1
stage: deploy
when: manual
script:
- make deploy-roi-worker
cache: []

19
Dockerfile Normal file
View File

@ -0,0 +1,19 @@
FROM ubuntu:22.04
# OS packages first so the apt layer is cached across source changes.
ADD packages.txt packages.txt
RUN apt-get update -y && DEBIAN_FRONTEND=noninteractive apt-get install -y $(cat packages.txt)
# Pin the torch stack before general requirements: this huge layer rarely changes.
RUN pip3 install torch==1.13.0 torchvision==0.14.0 torchaudio==0.13.0
ADD requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
ADD detection /emblem/detection
ADD alg /emblem/alg
ADD api /emblem/api
ADD web /emblem/web
RUN cd /emblem/api/api && ./manage.py collectstatic --noinput
RUN mkdir -p /emblem/log
ADD scripts /emblem/scripts
ADD nginx.conf /emblem/nginx.conf
# Known-good sample image used as a build-time smoke test for qrtool below.
ADD dataset/topleft/topleft-0518.jpeg /tmp/topleft-test.jpg
# (fix) removed duplicate `ADD nginx.conf /emblem/nginx.conf` that created a
# redundant, identical layer.
RUN cd /emblem/alg/ && ./qrtool topleft /tmp/topleft-test.jpg
WORKDIR /emblem
CMD /emblem/scripts/entrypoint

144
Makefile Normal file
View File

@ -0,0 +1,144 @@
.PHONY: FORCE
# Docker image naming: every image is tagged with the short git SHA of HEAD,
# so deploys are traceable to a commit. IMAGE_PROD mirrors to the Aliyun
# registry used by the production cluster.
IMAGE_TAG := $(shell git rev-parse --short HEAD)
IMAGE_REPO := registry.gitlab.com/euphon/themblem
IMAGE_REPO_PROD := registry.cn-shenzhen.aliyuncs.com/emblem/themblem
IMAGE := $(IMAGE_REPO):$(IMAGE_TAG)
IMAGE_PROD := $(IMAGE_REPO_PROD):$(IMAGE_TAG)
# OpenCV link mode: shared on macOS (static link is impractical there),
# static on Linux so the Docker image needs no OpenCV runtime libs.
ifeq ($(shell uname), Darwin)
BUILD_SHARED_LIBS := ON
else
BUILD_SHARED_LIBS := OFF
endif
# The *_FILES lists enumerate everything that gets staged under build/ for the
# Docker context via the build/%: % pattern rule below. find -L follows
# symlinks; := makes each find run exactly once at parse time.
API_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		api/ip2region.db \
		api/api \
		api/scripts \
		-type f)\
	)
WEB_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		web/dist \
		-type f)\
	)
# Exclude compiled Python bytecode from the image context.
DETECTION_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		detection \
		-type f \
		-not -name '*.pyc' \
	) \
	)
SCRIPTS_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		scripts \
		-type f \
	) \
	)
# Only the single smoke-test image from the dataset is shipped.
DATASET_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		dataset \
		-name topleft-0518.jpeg \
	) \
	)
# NOTE(review): no -type f here, so directory paths are included too —
# harmless with cp -a but worth confirming it is intentional.
ALG_FILES := \
	$(addprefix build/, \
	$(shell find -L \
		alg/qrtool \
		alg/wechat_qrcode \
	) \
	)
# Command targets, not files — declare them phony so a stray file of the same
# name cannot mask them.
.PHONY: docker-build docker-push docker-push-prod web

# Stage everything into build/ (via the build/%: % pattern rule) and build the
# image from that directory as the Docker context.
docker-build: build/Dockerfile build/packages.txt build/requirements.txt \
		build/nginx.conf $(WEB_FILES) $(API_FILES) $(ALG_FILES) $(DETECTION_FILES) $(SCRIPTS_FILES) $(DATASET_FILES)
	find build
	docker build --network=host -t $(IMAGE) build
docker-push:
	docker push $(IMAGE)
# Retag the CI image for the production registry and push it there.
docker-push-prod:
	docker tag $(IMAGE) $(IMAGE_PROD)
	docker push $(IMAGE_PROD)
web: FORCE
	cd web && npm run build
	mkdir -p build/web
	cp -r web/dist build/web/dist
# Generic staging rule: mirror any source file into build/ preserving its path.
# Fixes: use $(@D) instead of forking dirname, and $< (the one prerequisite)
# instead of $^.
build/%: %
	mkdir -p $(@D)
	cp -a $< $@
.PHONY: deploy-api-dev deploy-api-prod deploy-roi-worker test

# Each deploy target: notify the alert webhook, roll the k8s deployment to the
# image tagged with the current commit, then block until rollout completes.
deploy-api-dev:
	curl -X POST https://euphon-alert-23358.famzheng.workers.dev/ -d 'Deploying Emblem API to dev: $(IMAGE)'
	kubectl --kubeconfig deploy/kubeconfig.dev set image deploy api emblem=$(IMAGE)
	kubectl --kubeconfig deploy/kubeconfig.dev rollout status --timeout=1h deploy api
deploy-api-prod:
	curl -X POST https://euphon-alert-23358.famzheng.workers.dev/ -d 'Deploying Emblem API to prod: $(IMAGE_PROD)'
	kubectl --kubeconfig deploy/kubeconfig.themblem set image deploy api emblem=$(IMAGE_PROD)
	kubectl --kubeconfig deploy/kubeconfig.themblem rollout status --timeout=1h deploy api
deploy-roi-worker:
	curl -X POST https://euphon-alert-23358.famzheng.workers.dev/ -d 'Deploying ROI Worker to emblem-s1: $(IMAGE)'
	kubectl --kubeconfig deploy/kubeconfig.emblem-s1 set image deploy roi-worker alg=$(IMAGE)
	kubectl --kubeconfig deploy/kubeconfig.emblem-s1 rollout status --timeout=1h deploy roi-worker
# Run all test suites. Fix: recursive invocations use $(MAKE) (not literal
# `make`) so -j/-n and the jobserver propagate.
test: FORCE
	cd emtest && cargo test -- --nocapture
	$(MAKE) -C api test
	$(MAKE) -C detection test
OPENCV_TAG := 4.9.0
# Sentinel file: LICENSE exists iff both repos were cloned at this tag.
opencv/src/LICENSE:
	rm -rf opencv/src opencv/contrib
	git clone --depth=1 https://github.com/opencv/opencv_contrib opencv/contrib -b $(OPENCV_TAG)
	git clone --depth=1 https://github.com/opencv/opencv opencv/src -b $(OPENCV_TAG)
# `opencv` collides with the directory of the same name; the FORCE prereq
# already makes it always run, but declare it phony to be explicit.
.PHONY: opencv opencv.js

# Native (C++) OpenCV build with the contrib modules (wechat_qrcode lives
# there). Python bindings, tests and examples are disabled to cut build time.
# Fix: use $(CURDIR) (always set by make) instead of the inherited $(PWD)
# shell variable, which can be absent or stale (e.g. under sudo).
opencv: opencv/src/LICENSE FORCE
	mkdir -p opencv/build/cpp opencv/install
	cd opencv/build/cpp && cmake \
		-D CMAKE_BUILD_TYPE=RELEASE \
		-D CMAKE_INSTALL_PREFIX=$(CURDIR)/opencv/install \
		-D OPENCV_GENERATE_PKGCONFIG=ON \
		-D BUILD_EXAMPLES=OFF \
		-D INSTALL_PYTHON_EXAMPLES=OFF \
		-D INSTALL_C_EXAMPLES=OFF \
		-D BUILD_TESTS=OFF \
		-D BUILD_PERF_TESTS=OFF \
		-D OPENCV_EXTRA_MODULES_PATH=$(CURDIR)/opencv/contrib/modules \
		-D BUILD_opencv_python2=OFF \
		-D BUILD_opencv_python3=OFF \
		-D WITH_PROTOBUF=ON \
		-D BUILD_SHARED_LIBS=$(BUILD_SHARED_LIBS) \
		-D WITH_GTK=OFF \
		-D WITH_TIFF=OFF \
		../../src
	$(MAKE) -C opencv/build/cpp
	$(MAKE) -C opencv/build/cpp install
# WebAssembly build driven by OpenCV's own build_js.py helper.
opencv.js: opencv/src/LICENSE FORCE
	mkdir -p opencv/build/wasm
	python3 opencv/src/platforms/js/build_js.py opencv/build/wasm \
		--build_wasm \
		--enable_exception \
		--cmake_option="-DOPENCV_EXTRA_MODULES_PATH=$(CURDIR)/opencv/contrib/modules/" \
		--cmake_option="-DWITH_PROTOBUF=off" \
		--emscripten_dir=../emsdk/upstream/emscripten/ \
		--disable_single_file
# Delegate to the alg sub-makefile. Fix: $(MAKE), not literal `make`.
alg/qrtool:
	$(MAKE) -C alg qrtool

1
README.md Normal file
View File

@ -0,0 +1 @@
# themblem.com project

6
alg/.dockerignore Normal file
View File

@ -0,0 +1,6 @@
opencv/.git
opencv/.cache
opencv/euphon/build
opencv/build_wasm
.git
/dataset/local

15
alg/.gitignore vendored Normal file
View File

@ -0,0 +1,15 @@
/qrtool
*.wasm
*.wasm.*
*.html
qrtool.js
*.dSYM
.DS_Store
qrtool.*.js
/lib
*.o
*.pb.cc
*.pb.h
/dataset/local
qrtool.zip
/dataset/scan/*.roi.jpg

View File

@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 5
}

265
alg/Makefile Normal file
View File

@ -0,0 +1,265 @@
.PHONY: FORCE default
# Locations of the OpenCV checkout/build produced by the top-level Makefile.
CV_DIR := $(shell pwd)/../opencv
CV_INSTALL_DIR := $(CV_DIR)/install
CV_WASM_DIR := $(CV_DIR)/build/wasm
CXX := ccache g++
CXXFLAGS := -O2 -std=c++17 -Wall -Werror -g -I$(CV_INSTALL_DIR)/include/opencv4
# Platform split: macOS uses @executable_path rpaths and dynamic linking with
# `open` as the viewer; Linux links statically and uses feh.
ifeq ($(shell uname), Darwin)
RPATH_FLAG := -Wl,-rpath,'@executable_path/lib'
STATIC :=
IMAGE_VIEWER := open
else
# $$ORIGIN: double '$' so the literal $ORIGIN reaches the linker.
RPATH_FLAG := -Wl,-rpath,'$$ORIGIN/lib'
STATIC := 1
IMAGE_VIEWER := feh
endif
# Optional feature toggles (empty = disabled); gate pulsar/grpc sources below.
USE_PULSAR :=
ENABLE_GRPC :=
# --start-group/--end-group let static archives resolve circular references.
START_GROUP := -Wl,--start-group
END_GROUP := -Wl,--end-group
CV_PKG_CONFIG_PATH := $(shell pwd)/../opencv/install/lib/pkgconfig/
# Ask pkg-config for OpenCV flags; filter out the bogus -lIconv::Iconv CMake
# target name that leaks into the .pc file on some builds.
OPENCV_FLAGS := $(filter-out -lIconv::Iconv, \
	$(shell PKG_CONFIG_PATH=$(CV_PKG_CONFIG_PATH) pkg-config opencv4 --libs --cflags $(if $(STATIC), --static)) \
	)
default: qrtool qrtool.wx.wasm.br qrtool.web.js
# The native binary is built with the WeChat QR model files enabled.
qrtool: CXXFLAGS += -DWECHAT_QRCODE_USE_MODEL=1
# Makefile is a prerequisite so flag changes trigger a relink.
qrtool: qrtool.cpp libqr.cpp \
		$(if $(USE_PULSAR), mq_worker.cpp) \
		base64.cpp mq_worker.h base64.h \
		http.o \
		$(if $(ENABLE_GRPC), fileprocess.o fileprocess.pb.o fileprocess.grpc.pb.o) \
		Makefile
	$(CXX) -o $@ \
		$(if $(STATIC), -static) \
		$(filter %.cpp %.o, $^) \
		-DQRTOOL_MAIN=1 \
		$(if $(USE_PULSAR), -lpulsar) \
		$(CXXFLAGS) \
		$(RPATH_FLAG) \
		$(if $(STATIC), $(START_GROUP) -ljbig) \
		$(OPENCV_FLAGS) \
		$(if $(STATIC), $(END_GROUP)) \
		-Wno-error=unused-function
# Fix: the recipe above previously ended with a trailing backslash, which
# spliced the `qrtool.zip:` rule header below into the link command
# (backslash-newline joins logical lines before make classifies them).
qrtool.zip: qrtool
	rm -rf qrtool.zip-workdir
	mkdir -p qrtool.zip-workdir
	cp qrtool qrtool.zip-workdir/qrtool.$(shell git describe --always).x86_64
	cd qrtool.zip-workdir && zip qrtool.zip qrtool.$(shell git describe --always).x86_64 && mv qrtool.zip ..
	rm -rf qrtool.zip-workdir
angle: qrtool
./qrtool angle dataset/camera/warp-small.jpg
verify: qrtool
./qrtool verify ../dataset/similarity/19000-roi.jpg ../dataset/similarity/19000.jpg
./qrtool verify ../dataset/similarity/19006-roi.jpg ../dataset/similarity/19006.jpg
# ./qrtool verify ../dataset/local/scan-data/19687-roi.jpg ../dataset/local/scan-data/19687-frame.jpg
verify-neg: qrtool
./qrtool verify ../dataset/similarity/19000-roi.jpg ../dataset/similarity/19002.jpg
./qrtool verify ../dataset/similarity/19006-roi.jpg ../dataset/similarity/19002.jpg
verify-test: D := ../dataset/local/scan-data/
verify-test: qrtool FORCE
for roi in $(shell ls $D | grep roi.jpg | sort -R | head -n 100); do \
frame=$${roi/roi/frame}; \
cmd="./qrtool verify $D/$$roi $D/$$frame"; \
sim=$$($$cmd | grep similarity); \
echo "<div class=case><img class=roi src=$$roi /> <img class=frame src=$$frame /><h1>$$sim</h1>"; \
cat $D/$${roi/-roi.jpg/.txt}; \
echo "</div>"; \
done | tee $D/verify.html
echo '<style> div.case { border: 1px solid green; padding: 1rem; margin: 1rem; } img.frame { width: 200px } img.roi { display: block }' >> $D/verify.html
rectify: qrtool
./qrtool rectify dataset/camera/warp-small.jpg
roi: code=4295987837721
roi: qrtool
./qrtool roi dataset/scan/$(code).jpg
$(IMAGE_VIEWER) dataset/scan/$(code).jpg.roi.jpg
topleft: qrtool
./qrtool topleft ../dataset/scandata/18986.jpg
roi_bench: qrtool
begin=$$(date +%s); parallel -j10 ./qrtool roi_bench -- dataset/batches/*; end=$$(date +%s); nfiles=$$(find dataset/batches -type f | wc -l); echo total qps: $$((nfiles / (end - begin)))
imdecode: qrtool
./qrtool imdecode dataset/camera/warp-small.jpg
angle-bench: qrtool
time -f %e $(SHELL) -c '\
for i in $(shell seq 100); do \
$(library_path_prefix) \
./qrtool angle dataset/camera/warp-small.jpg; \
done'
neg: qrtool
set -e; \
for img in $(wildcard dataset/negative/*.jpg); do \
if ./qrtool angle $$img; then echo "negative image check failed: $$img"; exit 1; fi; \
done
detect: qrtool
./qrtool detect dataset/camera/warp-small.jpg
detect2: qrtool
./qrtool detect2 dataset/camera/warp-small.jpg
check: qrtool
./qrtool check dataset/camera/warp-small.jpg
diagonal: qrtool
./qrtool diagonal dataset/camera/warp-small.jpg
bench: qrtool
./qrtool bench dataset/camera/warp-small.jpg
memory: qrtool
valgrind ./qrtool bench dataset/camera/warp-small.jpg
fileprocess.o: fileprocess.grpc.pb.h
worker: qrtool
./qrtool roi_worker roi
worker_nop: qrtool
./qrtool roi_worker_nop roi
workers: qrtool
parallel -j8 ./qrtool roi_worker -- roi roi roi roi roi roi roi roi
grpc: qrtool
./qrtool grpc_server 0.0.0.0:32439
energy: qrtool FORCE
$(library_path_prefix) \
./qrtool energy dataset/roi/20231226/roi-1703563444.7468174.png
energy.html: qrtool FORCE
ls dataset/roi/20231224/* | sort -R | head -n 100 | while read x; do \
echo -n "<div>"; \
$(library_path_prefix) \
./qrtool energy $$x | tr -d '\n'; \
echo -n " <img src=\"$$x\" />"; \
echo "</div>"; \
done | sort -rn -k 2 | tee $@
google-chrome $@
hist.html: qrtool FORCE
ls dataset/roi/20231224/* | grep -v hist | sort -R | head -n 100 | while read x; do \
echo -n "<div>$$x"; \
echo -n " <img src=\"$$x\" /><img height=60 width=60 src=\""; \
$(library_path_prefix) \
./qrtool $$x | tr -d '\n'; \
echo "\"/></div>"; \
done | sort -rn -k 2 | tee $@
dft.html: qrtool FORCE
ls dataset/roi/20231224/* | grep -v dft | \
while read x; do \
echo -n "<div>$$x"; \
echo -n " <img src=\"$$x\" /><img height=60 width=60 src=\""; \
$(library_path_prefix) \
./qrtool $$x | tr -d '\n'; \
echo "\"/></div>"; \
done | tee $@
dft-compare.html: qrtool FORCE
(echo dataset/roi/20231224/roi-1703295645.003432.png; echo dataset/roi/20231224/roi-1703295645.365394.png) | \
while read x; do \
echo -n "<div>$$x"; \
echo -n " <img src=\"$$x\" /><img height=60 width=60 src=\""; \
$(library_path_prefix) \
./qrtool $$x | tr -d '\n'; \
echo "\"/></div>"; \
done | sort -rn -k 2 | tee $@
google-chrome $@
dft: qrtool FORCE
$(library_path_prefix) \
./qrtool dataset/camera/warp-small.jpg
# Target-specific emcc flags: the web build is plain -O3; the WeChat
# mini-program build additionally wraps the module with pre/post JS shims.
qrtool.web.js: EMCC_FLAGS := \
	-O3
qrtool.wx.js: EMCC_FLAGS := \
	-O3 \
	--pre-js pre.wx.js \
	--post-js post.wx.js
# NOTE(review): the empty assignment immediately clobbers -msimd128, i.e.
# WASM SIMD is currently disabled (presumably for MIN_SAFARI_VERSION compat —
# confirm). Delete one of the two lines once the decision is final.
SIMD128_OPTS := -msimd128
SIMD128_OPTS :=
qrtool.wx.js: pre.wx.js post.wx.js
qrtool.wx.js qrtool.web.js: qrtool_wasm.cpp libqr.cpp Makefile
emcc \
-o $@ -I$(CV_INSTALL_DIR)/include/opencv4 $(filter %.cpp, $^) \
$(addprefix $(CV_WASM_DIR)/lib/, \
libopencv_core.a \
libopencv_dnn.a \
libopencv_imgproc.a \
libopencv_wechat_qrcode.a \
) \
$(addprefix $(CV_WASM_DIR)/3rdparty/lib/, \
libzlib.a) \
'-sEXPORTED_FUNCTIONS=["_qrtool_angle","_malloc","_free"]' \
'-sEXPORTED_RUNTIME_METHODS=["ccall","cwrap","_wasm_call_ctors"]' \
"-sMIN_CHROME_VERSION=73" \
"-sMIN_SAFARI_VERSION=140100" \
-sALLOW_MEMORY_GROWTH=1 \
-sENVIRONMENT=web \
$(SIMD128_OPTS) \
-std=c++17 \
-fexceptions \
-lembind \
-g1 \
-sWASM=1 \
$(EMCC_FLAGS)
qrtool.wx.wasm.br: qrtool.wx.js
brotli -kf qrtool.wx.wasm
opencv: FORCE
./opencv/euphon/build-cpp.sh
wasm: FORCE
./opencv/euphon/build-wasm.sh
serve: qrtool FORCE
$(library_path_prefix) \
./server.py
deploy: FORCE
set -e; \
for kc in $(TARGET); do \
echo $$kc; \
kubectl --kubeconfig deploy/kubeconfig.$$kc set image deployment/alg alg=$(IMAGE); \
kubectl --kubeconfig deploy/kubeconfig.$$kc rollout status deployment alg; \
done
PROTO_DIR := ../../../cassia/estord/proto
fileprocess.grpc.pb.h fileprocess.grpc.pb.cc fileprocess.pb.h fileprocess.pb.cc: $(PROTO_DIR)/fileprocess.proto
protoc -I $(PROTO_DIR) --grpc_out=. --plugin=protoc-gen-grpc=`which grpc_cpp_plugin` $<
protoc -I $(PROTO_DIR) --cpp_out=. $<
install-scanner: qrtool.wx.wasm.br
@cp -v qrtool.wx.js qrtool.wx.wasm.br ../scanner/assets
@cp -v qrtool.wx.js ../scanner/worker
install-web: qrtool.web.wasm
@cp -v qrtool.web.js qrtool.web.wasm ../web/public/camera-4.0/js/
install: install-web install-scanner

211
alg/angle.cpp Normal file
View File

@ -0,0 +1,211 @@
#include <algorithm>
#include "libqr.h"
// Iterative flood fill: zero out the entire 8-connected component of non-zero
// pixels containing `p` in the binary image `bin` (modified in place).
// Uses an explicit stack to avoid deep recursion on large blobs.
static void clear_connected(Mat &bin, Point p)
{
    vector<Point> q;
    q.push_back(p);
    while (q.size()) {
        auto p = q[q.size() - 1]; // pop most recently pushed pixel (shadows the parameter)
        q.pop_back();
        bin.at<uint8_t>(p.y, p.x) = 0;
        // Visit all 8 neighbours (and the pixel itself, already cleared above).
        for (int i = -1; i <= 1; i++) {
            for (int j = -1; j <= 1; j++) {
                int nx = p.x + i;
                int ny = p.y + j;
                if (nx < 0 || nx >= bin.cols || ny < 0 || ny >= bin.rows) {
                    continue; // neighbour falls outside the image
                }
                // NOTE(review): reads via at<bool> but writes via at<uint8_t>;
                // both are 1 byte so this works on a CV_8U mat, but the
                // accessor types should match for clarity.
                if (bin.at<bool>(ny, nx)) {
                    q.push_back(Point(nx, ny));
                }
            }
        }
    }
}
// Collect one representative pixel per connected component of `bin`.
// `bin` is taken by value but cv::Mat shares its buffer, so the image data is
// destroyed in the process — callers pass bin.clone().
// Scans column-major (x outer, y inner), matching the original ordering.
static
vector<Point> find_points(Mat bin)
{
    vector<Point> found;
    for (int col = 0; col < bin.cols; col++) {
        for (int row = 0; row < bin.rows; row++) {
            if (bin.at<uint8_t>(row, col) == 0) {
                continue; // background pixel
            }
            Point seed(col, row);
            found.push_back(seed);
            // Erase the whole component so it is only counted once.
            clear_connected(bin, seed);
        }
    }
    return found;
}
// True when `p` lies in the central region of `bin`, i.e. more than `margin`
// pixels from every border, where margin is 20% of the image HEIGHT (the same
// margin is applied on the x axis).
static
bool in_center(Mat &bin, Point &p)
{
    int margin = bin.rows * 2 / 10;
    bool x_ok = p.x > margin && p.x < bin.cols - margin;
    // NOTE(review): y uses `<=` on the far edge while x uses `<` — looks
    // accidental; preserved as-is to keep behavior identical.
    bool y_ok = p.y > margin && p.y <= bin.rows - margin;
    return x_ok && y_ok;
}
// Returns the SQUARED Euclidean distance between p and q (no sqrt).
// Callers (find_closest) only compare distances, and squaring preserves
// ordering, so the sqrt is deliberately skipped.
static
float distance(Point &p, Point &q)
{
    auto xdiff = p.x - q.x;
    auto ydiff = p.y - q.y;
    return xdiff * xdiff + ydiff * ydiff;
}
// Return the index of the point in `points` closest to `p` (by squared
// distance) within the quadrant selected by `left`/`top`
// (left/top = toward smaller x/y). Returns -1 when no point qualifies.
// `p` itself is skipped by exact coordinate match.
static
int find_closest(Point &p, vector<Point> &points, bool left, bool top)
{
    int ret = -1;
    for (int ii = 0; ii < points.size(); ii++) {
        auto i = points[ii];
        if (i.x == p.x && i.y == p.y) continue; // skip p itself
        // Quadrant filters. NOTE(review): the strictness is asymmetric
        // (`i.x <= p.x` for right vs `i.y < p.y` for bottom), so points
        // exactly aligned with p are treated differently per axis — confirm
        // this is intended.
        if (left && i.x > p.x) continue;
        if (top && i.y > p.y) continue;
        if (!left && i.x <= p.x) continue;
        if (!top && i.y < p.y) continue;
        if (ret < 0 || distance(p, points[ret]) > distance(p, i)) {
            ret = ii;
        }
    }
    return ret;
}
// Estimate the local grid rotation at `p`: find its nearest neighbour toward
// the top-left and toward the bottom-right, and return the angle of that
// diagonal folded into [0, 45] degrees. Returns -1 when either neighbour is
// missing. Returns 0 for a horizontal diagonal (avoids division by zero).
static
float find_angle(Point &p, vector<Point> &points)
{
    // Find 4 dots in 4 quadrant (if any)
    // Then find 2 closest on y axis
    // Then calculate angle between those two
    auto topleft = find_closest(p, points, true, true);
    auto bottomright = find_closest(p, points, false, false);
    if (topleft < 0 || bottomright < 0)
        return -1;
    auto a = points[topleft];
    auto b = points[bottomright];
    printf("point %d %d top left %d %d, bottom right %d %d\n", p.x, p.y, a.x, a.y, b.x, b.y);
    if (a.y == b.y) return 0;
    // Bug fix: the original divided two ints, truncating the ratio toward
    // zero BEFORE atan, which quantised the result to a handful of angles.
    // Promote to float so atan sees the true slope.
    auto ret = atan((float)(b.x - a.x) / (b.y - a.y)) * 180.0 / CV_PI;
    // Fold into [0, 45] degrees.
    if (ret < 0) ret += 90;
    if (ret > 45) ret = 90 - ret;
    return ret;
}
// Compute the median and variance of `angles`.
// Taken by value: the local copy is sorted, the caller's vector is untouched.
// For even-sized input the upper-middle element is used as the median.
// Empty input yields median = variance = 0 (the original indexed angles[0]
// on empty input, which was undefined behavior).
static
void angle_stat(std::vector<float> angles, float &median, float &variance)
{
    if (angles.empty()) {
        median = 0;
        variance = 0;
        return;
    }
    std::sort(angles.begin(), angles.end());
    median = angles[angles.size() / 2];
    float sum = 0;
    for (auto x : angles) {
        sum += x;
    }
    float avg = sum / angles.size();
    variance = 0;
    for (auto x : angles) {
        float diff = x - avg;
        variance += diff * diff;
    }
    // Bug fix: the original left `variance` as the raw sum of squared
    // deviations; divide by N to get the (population) variance the name
    // promises.
    variance /= angles.size();
}
// Estimate the dominant line orientation in binary image `img` using the
// standard Hough transform, normalised into [0, 45] degrees.
// On failure sets `err` and returns -1.
float hough_lines_angle(Mat &img, string &err)
{
    show(img); // debug visualisation helper (declared in libqr.h)
    vector<Vec3f> lines;
    // Vec3f output variant: each entry is (rho, theta, votes), theta in
    // radians, sorted by decreasing votes. Threshold of 6 votes.
    HoughLines(img, lines, 1, CV_PI / 180, 6, 0, 0);
    for (auto x: lines) {
        printf("line: %.1f %.1f %.1f\n", x[0], x[1] * 180.0 / CV_PI, x[2]);
    }
    if (!lines.size()) {
        err = "cannot find lines in image";
        return -1;
    }
    // Weighted-median of theta, weighting by vote count.
    // NOTE(review): total_weight only sums the 5 strongest lines while the
    // loop below accumulates over ALL lines, so the "median" is biased toward
    // the strongest lines — confirm this asymmetry is intended.
    int total_weight = 0;
    for (int i = 0; i < lines.size() && i < 5; i++) {
        total_weight += lines[i][2];
    }
    int acc = 0;
    float ret = 0;
    for (int i = 0; i < lines.size(); i++) {
        acc += lines[i][2];
        if (acc >= total_weight / 2) {
            ret = lines[i][1] * 180.0 / CV_PI; // radians -> degrees
            break;
        }
    }
    // Fold the angle into [0, 45]: grid orientation is 90-degree periodic and
    // mirror-symmetric about 45.
    while (ret < 0) {
        ret += 90;
    }
    while (ret > 90) {
        ret -= 90;
    }
    if (ret > 45) {
        ret = 90 - ret;
    }
    printf("angle: %f\n", ret);
    return ret;
}
// Estimate the rotation angle of the emblem dot grid in grayscale image
// `gray`. Strategy: adaptive-threshold to a binary image, erode until the dot
// components separate into enough (> min_points) distinct blobs, then measure
// the dominant orientation with a Hough transform.
// Returns the angle in [0, 45] degrees, or -1 with `err` set on failure.
float emblem_detect_angle(Mat &gray, string &err)
{
    Mat bin;
    const int min_points = 30; // minimum separable components required
    vector<Point> points;
    auto kernel = getStructuringElement(MORPH_ELLIPSE, Size(3, 3));
    adaptiveThreshold(gray, bin, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY_INV, 11, 2);
    while (true) {
        // In this loop we erode a "full" image in order to get enough detached components
        points = find_points(bin.clone()); // clone: find_points destroys its input
        printf("points: %zu\n", points.size());
        if (points.size() == 0) {
            err = "cannot find enough points";
            return -1;
        }
        if (points.size() > min_points) {
            break;
        }
        erode(bin, bin, kernel);
    }
    while (true) {
        // In this loop we further erode a "lean" image in order to get clarity until it's too much
        Mat eroded;
        erode(bin, eroded, kernel);
        auto tmp = find_points(eroded.clone());
        if (tmp.size() < min_points) {
            printf("too much\n"); // eroded too far; keep the previous image
            break;
        }
        bin = eroded.clone();
    }
    return hough_lines_angle(bin, err);
    // NOTE(review): everything below is UNREACHABLE (dead code after the
    // return above). It is the older neighbour-pair angle estimator and the
    // only caller of find_angle/angle_stat/in_center — consider deleting it
    // or gating it behind a flag instead of leaving it in place.
    vector<float> angles;
    for (auto p: points) {
        if (!in_center(bin, p)) {
            continue;
        }
        auto angle = find_angle(p, points);
        if (angle >= 0) {
            printf("found angle %f\n", angle);
            angles.push_back(angle);
        }
    }
    if (!angles.size()) {
        err = "cannot find point to calculate angle";
        return -1;
    }
    float med, var;
    angle_stat(angles, med, var);
    printf("med: %f, var: %f\n", med, var);
    return med;
}

95
alg/base64.cpp Normal file
View File

@ -0,0 +1,95 @@
#include "base64.h"
#include <iostream>
static const std::string base64_chars =
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"abcdefghijklmnopqrstuvwxyz"
"0123456789+/";
static inline bool is_base64(BYTE c) {
return (isalnum(c) || (c == '+') || (c == '/'));
}
// Base64-encode `bufLen` bytes starting at `buf`, using the standard alphabet
// (see base64_chars) with '=' padding. Processes complete 3-byte groups
// first, then pads the 1- or 2-byte tail so the output length is a multiple
// of 4.
std::string base64_encode(BYTE const* buf, unsigned int bufLen) {
    std::string out;
    unsigned int pos = 0;
    // Full 3-byte groups -> 4 output characters each.
    while (pos + 3 <= bufLen) {
        BYTE b0 = buf[pos];
        BYTE b1 = buf[pos + 1];
        BYTE b2 = buf[pos + 2];
        out += base64_chars[(b0 & 0xfc) >> 2];
        out += base64_chars[((b0 & 0x03) << 4) + ((b1 & 0xf0) >> 4)];
        out += base64_chars[((b1 & 0x0f) << 2) + ((b2 & 0xc0) >> 6)];
        out += base64_chars[b2 & 0x3f];
        pos += 3;
    }
    // Remaining 1 or 2 bytes: zero-pad the missing input bits, emit the
    // meaningful sextets, then '=' for the rest of the quartet.
    unsigned int rem = bufLen - pos;
    if (rem) {
        BYTE b0 = buf[pos];
        BYTE b1 = (rem == 2) ? buf[pos + 1] : 0;
        out += base64_chars[(b0 & 0xfc) >> 2];
        out += base64_chars[((b0 & 0x03) << 4) + ((b1 & 0xf0) >> 4)];
        if (rem == 2) {
            out += base64_chars[(b1 & 0x0f) << 2];
            out += '=';
        } else {
            out += '=';
            out += '=';
        }
    }
    return out;
}
// Decode a base64 string into raw bytes. Decoding stops at the first '=' or
// any character outside the base64 alphabet; padding is therefore handled by
// simply stopping there and flushing the partial quartet below.
std::vector<BYTE> base64_decode(std::string const& encoded_string) {
    int in_len = encoded_string.size();
    int i = 0;   // fill level of the current 4-char group
    int j = 0;
    int in_ = 0; // read position in the input
    BYTE char_array_4[4], char_array_3[3];
    std::vector<BYTE> ret;
    while (in_len-- && ( encoded_string[in_] != '=') && is_base64(encoded_string[in_])) {
        char_array_4[i++] = encoded_string[in_]; in_++;
        if (i ==4) {
            // Map alphabet characters back to their 6-bit values.
            for (i = 0; i <4; i++)
                char_array_4[i] = base64_chars.find(char_array_4[i]);
            // Repack four 6-bit values into three bytes.
            char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
            char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
            char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
            for (i = 0; (i < 3); i++)
                ret.push_back(char_array_3[i]);
            i = 0;
        }
    }
    // Trailing partial group (input had padding): zero-fill the missing
    // characters, repack, and emit only the i-1 meaningful bytes.
    if (i) {
        for (j = i; j <4; j++)
            char_array_4[j] = 0;
        for (j = 0; j <4; j++)
            char_array_4[j] = base64_chars.find(char_array_4[j]);
        char_array_3[0] = (char_array_4[0] << 2) + ((char_array_4[1] & 0x30) >> 4);
        char_array_3[1] = ((char_array_4[1] & 0xf) << 4) + ((char_array_4[2] & 0x3c) >> 2);
        char_array_3[2] = ((char_array_4[2] & 0x3) << 6) + char_array_4[3];
        for (j = 0; (j < i - 1); j++) ret.push_back(char_array_3[j]);
    }
    return ret;
}

11
alg/base64.h Normal file
View File

@ -0,0 +1,11 @@
#ifndef _BASE64_H_
#define _BASE64_H_
#include <vector>
#include <string>
typedef unsigned char BYTE;
std::string base64_encode(BYTE const* buf, unsigned int bufLen);
std::vector<BYTE> base64_decode(std::string const&);
#endif

View File

@ -0,0 +1,20 @@
apiVersion: v1
clusters:
- cluster:
insecure-skip-tls-verify: true
server: https://derby.euphon.net:6443
name: default
contexts:
- context:
cluster: default
namespace: emblem
user: default
name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
user:
client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJrakNDQVRlZ0F3SUJBZ0lJZW9uUVdIaE5mcGd3Q2dZSUtvWkl6ajBFQXdJd0l6RWhNQjhHQTFVRUF3d1kKYXpOekxXTnNhV1Z1ZEMxallVQXhOamsyTURjd016ZzNNQjRYRFRJek1Ea3pNREV3TXprME4xb1hEVEkwTURreQpPVEV3TXprME4xb3dNREVYTUJVR0ExVUVDaE1PYzNsemRHVnRPbTFoYzNSbGNuTXhGVEFUQmdOVkJBTVRESE41CmMzUmxiVHBoWkcxcGJqQlpNQk1HQnlxR1NNNDlBZ0VHQ0NxR1NNNDlBd0VIQTBJQUJHZWpuRkFjK2hPRTBtNEMKT1Z3NkVNTG85SGZJMU4vVDYrTC9zRzR0OHA0WWI5VWhiTnlhVC9HcjlwVEhpZG5zS21sT3ZiZWZPR1NSV3JlbQpEcEhzNjEyalNEQkdNQTRHQTFVZER3RUIvd1FFQXdJRm9EQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFmCkJnTlZIU01FR0RBV2dCVGd0NFp1aUp3YlZlZ0xUalFMSGdrVzFVR2JqREFLQmdncWhrak9QUVFEQWdOSkFEQkcKQWlFQTFXeWNZRW5WbEs2OG1GZGZBUmlKdytBUytSQ0swSkl3M2hLZXJmNlV4WE1DSVFDUU85cGROOUgxMzBOOApkUncvMHJUbHo3Q1J0ZmZObEdUdjNDeE1lRVVUb3c9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCi0tLS0tQkVHSU4gQ0VSVElGSUNBVEUtLS0tLQpNSUlCZHpDQ0FSMmdBd0lCQWdJQkFEQUtCZ2dxaGtqT1BRUURBakFqTVNFd0h3WURWUVFEREJock0zTXRZMnhwClpXNTBMV05oUURFMk9UWXdOekF6T0Rjd0hoY05Nak13T1RNd01UQXpPVFEzV2hjTk16TXdPVEkzTVRBek9UUTMKV2pBak1TRXdId1lEVlFRRERCaHJNM010WTJ4cFpXNTBMV05oUURFMk9UWXdOekF6T0Rjd1dUQVRCZ2NxaGtqTwpQUUlCQmdncWhrak9QUU1CQndOQ0FBUTJEUWxMMWxpM0cyU29pa0t1MGpIM2YwQzZYdWlxc1U0bVBzN0FqR1VPCnlGNnNra0hVamg2ZldPMDZBZ3NrUkdQQ3FaOFpwQjlDL2doZDlqVTl2ZnhubzBJd1FEQU9CZ05WSFE4QkFmOEUKQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVU0TGVHYm9pY0cxWG9DMDQwQ3g0SgpGdFZCbTR3d0NnWUlLb1pJemowRUF3SURTQUF3UlFJaEFLdlNQVkRlTmtBZFRUR0pzNWNLRGFmSStCYUR4ZmhvCm1hM082V0hxK05JeEFpQnMxVTBsVlNRWjRYb0lZWXJ4OHBMSm5EUzVjSGI4cmRLTndaTjZEcExCSlE9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
client-key-data: LS0tLS1CRUdJTiBFQyBQUklWQVRFIEtFWS0tLS0tCk1IY0NBUUVFSUd4WU5mWk5GWVFJRG9zRVZjM0JiVUFNcVFoV0wrNVpndHIwZ0R2SUJES0tvQW9HQ0NxR1NNNDkKQXdFSG9VUURRZ0FFWjZPY1VCejZFNFRTYmdJNVhEb1F3dWowZDhqVTM5UHI0dit3YmkzeW5oaHYxU0ZzM0pwUAo4YXYybE1lSjJld3FhVTY5dDU4NFpKRmF0NllPa2V6clhRPT0KLS0tLS1FTkQgRUMgUFJJVkFURSBLRVktLS0tLQo=

View File

@ -0,0 +1,20 @@
apiVersion: v1
clusters:
- cluster:
insecure-skip-tls-verify: true
server: https://192.168.0.253:6443
name: default
contexts:
- context:
cluster: default
namespace: emblem
user: default
name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
user:
client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJrakNDQVRlZ0F3SUJBZ0lJTXBPbFZsSkM4YUV3Q2dZSUtvWkl6ajBFQXdJd0l6RWhNQjhHQTFVRUF3d1kKYXpOekxXTnNhV1Z1ZEMxallVQXhOekV3TURJeU9EYzRNQjRYRFRJME1ETXdPVEl5TWpFeE9Gb1hEVEkxTURNdwpPVEl5TWpFeE9Gb3dNREVYTUJVR0ExVUVDaE1PYzNsemRHVnRPbTFoYzNSbGNuTXhGVEFUQmdOVkJBTVRESE41CmMzUmxiVHBoWkcxcGJqQlpNQk1HQnlxR1NNNDlBZ0VHQ0NxR1NNNDlBd0VIQTBJQUJQd3FiQ0dDZkdiRWhxem0KUjBoU2ZTTEh1OWxjdEdKb2laTTZYUlNJRlJYaEZJd043V2w0NXBPbSttQm9ldG92UWMyL3hyS2kwYmhMeXhUbApRbGk2bFEyalNEQkdNQTRHQTFVZER3RUIvd1FFQXdJRm9EQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFmCkJnTlZIU01FR0RBV2dCUms2aXJOdDk4NzNBdnUwTjRvWmVKZ3lIVDE5akFLQmdncWhrak9QUVFEQWdOSkFEQkcKQWlFQXNRZzZxQ2lMOXpPazdTenJrbUNWSjA4Q1M4aWUzaW5FRDRNUVd0bVVVU3dDSVFEUG5RY0pNQVR2VlJoZwo4NEhOaVh2RVhWbEN1SHlOM1V6MEdrT2sveEVoK2c9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCi0tLS0tQkVHSU4gQ0VSVElGSUNBVEUtLS0tLQpNSUlCZHpDQ0FSMmdBd0lCQWdJQkFEQUtCZ2dxaGtqT1BRUURBakFqTVNFd0h3WURWUVFEREJock0zTXRZMnhwClpXNTBMV05oUURFM01UQXdNakk0Tnpnd0hoY05NalF3TXpBNU1qSXlNVEU0V2hjTk16UXdNekEzTWpJeU1URTQKV2pBak1TRXdId1lEVlFRRERCaHJNM010WTJ4cFpXNTBMV05oUURFM01UQXdNakk0Tnpnd1dUQVRCZ2NxaGtqTwpQUUlCQmdncWhrak9QUU1CQndOQ0FBU2dQWGRKYWRDamh5SEQ4TlRUaS9ZRitKcDZUaXNpQTRyV2Q5OUlXejdLCnM5Rm5OV2NUVHY4SHNjQkM0TVpLRkwwM0dYbFU4SENTNk9pY1ZmdU12WHFFbzBJd1FEQU9CZ05WSFE4QkFmOEUKQkFNQ0FxUXdEd1lEVlIwVEFRSC9CQVV3QXdFQi96QWRCZ05WSFE0RUZnUVVaT29xemJmZk85d0w3dERlS0dYaQpZTWgwOWZZd0NnWUlLb1pJemowRUF3SURTQUF3UlFJZ0RZQ2JubHpiaXorMVlmclZCQmV6VnVWSTB5Kzl1N3RJCks2RXFHYlFKVXAwQ0lRRHRuZjA2NFFGWExaaGtLWiszKy9KdnJieTdmYU4rVTV6ZEdyOWFCMWQ1MXc9PQotLS0tLUVORCBDRVJUSUZJQ0FURS0tLS0tCg==
client-key-data: LS0tLS1CRUdJTiBFQyBQUklWQVRFIEtFWS0tLS0tCk1IY0NBUUVFSUc3bXZVNmlEcGZMam1lcmRzVG4wQXhIczlnL1QzUzdjb3pjU280bStJS21vQW9HQ0NxR1NNNDkKQXdFSG9VUURRZ0FFL0Nwc0lZSjhac1NHck9aSFNGSjlJc2U3MlZ5MFltaUprenBkRklnVkZlRVVqQTN0YVhqbQprNmI2WUdoNjJpOUJ6Yi9Hc3FMUnVFdkxGT1ZDV0xxVkRRPT0KLS0tLS1FTkQgRUMgUFJJVkFURSBLRVktLS0tLQo=

20
alg/deploy/kubeconfig.zy Normal file
View File

@ -0,0 +1,20 @@
apiVersion: v1
clusters:
- cluster:
insecure-skip-tls-verify: true
server: https://euphon.cloud:6443
name: default
contexts:
- context:
cluster: default
namespace: emblem
user: default
name: default
current-context: default
kind: Config
preferences: {}
users:
- name: default
user:
client-certificate-data: LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJrVENDQVRlZ0F3SUJBZ0lJVTI1b0lYUkIvM3N3Q2dZSUtvWkl6ajBFQXdJd0l6RWhNQjhHQTFVRUF3d1kKYXpOekxXTnNhV1Z1ZEMxallVQXhOekF6TURZNE5ETXpNQjRYRFRJek1USXlNREV3TXpNMU0xb1hEVEkwTVRJeApPVEV3TXpNMU0xb3dNREVYTUJVR0ExVUVDaE1PYzNsemRHVnRPbTFoYzNSbGNuTXhGVEFUQmdOVkJBTVRESE41CmMzUmxiVHBoWkcxcGJqQlpNQk1HQnlxR1NNNDlBZ0VHQ0NxR1NNNDlBd0VIQTBJQUJQaWs4UnBCcEVKS0NYbG8KdnB3UnJ0NEVKaHBsbUMvUW1zT3JwTUlYTC93amdnZkwrb0MvSVQ0VUtuOWZ0cmZsdlBjWEhHWVprYWgvd210QQo2OS9rUWlHalNEQkdNQTRHQTFVZER3RUIvd1FFQXdJRm9EQVRCZ05WSFNVRUREQUtCZ2dyQmdFRkJRY0RBakFmCkJnTlZIU01FR0RBV2dCU1grN1IzbDR0b0luMkp1bk9Cd1VCN1E4dlpwakFLQmdncWhrak9QUVFEQWdOSUFEQkYKQWlFQXNiTjVVUVRWSnhrRFlHbDJCUDhrUkF4Tk42OXRXMzNpdlBldXZVTXViM01DSUhoUzNab3d2dU9rci9KRAoyaFZhM25mQ3BWdzNrZ0NiVjd3a3RpVGdpaHNaCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0KLS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCk1JSUJlRENDQVIyZ0F3SUJBZ0lCQURBS0JnZ3Foa2pPUFFRREFqQWpNU0V3SHdZRFZRUUREQmhyTTNNdFkyeHAKWlc1MExXTmhRREUzTURNd05qZzBNek13SGhjTk1qTXhNakl3TVRBek16VXpXaGNOTXpNeE1qRTNNVEF6TXpVegpXakFqTVNFd0h3WURWUVFEREJock0zTXRZMnhwWlc1MExXTmhRREUzTURNd05qZzBNek13V1RBVEJnY3Foa2pPClBRSUJCZ2dxaGtqT1BRTUJCd05DQUFSZzlOYnRuZ1RDSzhIVnV5NHNpUzQ2dFE5b3pmY0dlclI2ZlFxbmVab0EKemlqRFdnaHhEWnNOTzVuVWVaWHpiWDgrbVdzNUIyRWtlaHZZeWhnRzY4MTVvMEl3UURBT0JnTlZIUThCQWY4RQpCQU1DQXFRd0R3WURWUjBUQVFIL0JBVXdBd0VCL3pBZEJnTlZIUTRFRmdRVWwvdTBkNWVMYUNKOWlicHpnY0ZBCmUwUEwyYVl3Q2dZSUtvWkl6ajBFQXdJRFNRQXdSZ0loQUoyamlZVGFKSHptUDJ0NjZmRlVkT3hmQjlNRnRNb2YKazN5dnNFZ1YxOWY4QWlFQXVIVUtjVHljVStzbnIxUWxhWjBoSFZ5OW53UGp3M3ZVdlVJWU9kTGZwNjA9Ci0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K
client-key-data: LS0tLS1CRUdJTiBFQyBQUklWQVRFIEtFWS0tLS0tCk1IY0NBUUVFSUZwRG9kSU9Kbmg3UUVlYWQ4MXdHdnkzaHNEQkhjbG5NQXEzL3pzUHlaVTFvQW9HQ0NxR1NNNDkKQXdFSG9VUURRZ0FFK0tUeEdrR2tRa29KZVdpK25CR3UzZ1FtR21XWUw5Q2F3NnVrd2hjdi9DT0NCOHY2Z0w4aApQaFFxZjErMnQrVzg5eGNjWmhtUnFIL0NhMERyMytSQ0lRPT0KLS0tLS1FTkQgRUMgUFJJVkFURSBLRVktLS0tLQo=

84
alg/fileprocess.cpp Normal file
View File

@ -0,0 +1,84 @@
#include "fileprocess.h"
#include "fileprocess.grpc.pb.h"
#include <grpc++/server_builder.h>
using namespace std;
using namespace grpc;
using namespace fileprocess;
// gRPC service implementation for the FileProcess API: each incoming file is
// handed to the `handler_fn` callback supplied at construction, and the
// per-file result (or error) is collected into the response.
class FileProcessServer final : public FileProcess::Service {
    handler_fn _handle_image; // callback that does the actual image work
    // Unary RPC: process every file in the request, one Output entry per file.
    // Always returns Status::OK; per-file failures are reported via the
    // `succeeded`/`error` fields instead of an RPC error.
    Status ProcessFiles(ServerContext* context, const Files* request,
                        Output *resp) override {
        printf("process files\n");
        for (auto file: request->files()) {
            printf("file: %s\n", file.path().c_str());
            string output_path;
            vector<uint8_t> output;
            // Copy the proto bytes into a plain vector for the handler.
            vector<uint8_t> input(file.data().begin(), file.data().end());
            auto r = _handle_image(file.path(), input, output_path, output);
            auto d = resp->add_files();
            if (r) {
                // Non-zero handler return = failure for this file only.
                d->set_succeeded(false);
                d->set_error("Failed to process image");
            } else {
                d->set_succeeded(true);
                d->set_path(output_path);
                string data(output.begin(), output.end());
                d->set_data(data);
            }
        }
        printf("done\n");
        return Status::OK;
    }
// NOTE(review): dead code — a streaming variant for archive members, disabled
// with #if 0. Consider deleting it or documenting why it is kept.
#if 0
    Status ProcessArchive(ServerContext* context, ServerReaderWriter<Output, ArchiveFile> *stream) override {
        ArchiveFile request;
        while (stream->Read(&request)) {
            if (request.data().size() <= 0) continue;
            string output_path;
            vector<uint8_t> output;
            vector<uint8_t> input(request.data().begin(), request.data().end());
            string path = request.path();
            path += string("-files/") + request.path_in_archive();
            auto r = _handle_image(path, input, output_path, output);
            Output d;
            if (r) {
                d.set_succeeded(false);
                string error = "Failed to process image " + request.path_in_archive();
                d.set_error(error);
            } else {
                d.set_succeeded(true);
                d.set_path(output_path);
                string data(output.begin(), output.end());
                d.set_data(data);
            }
            stream->Write(d);
        }
        return Status::OK;
    }
#endif
public:
    // `handle_image` must outlive the server (it is called from RPC threads).
    FileProcessServer(handler_fn handle_image) :
        _handle_image(handle_image)
    {
    }
};
// Start a blocking gRPC FileProcess server on `server_addr` (host:port),
// dispatching every file to `handle_image`. Blocks in Wait() and only
// returns 0 after the server is shut down.
int run_server(const string &server_addr, handler_fn handle_image) {
    FileProcessServer service(handle_image);
    ServerBuilder builder;
    // Insecure (no TLS) — presumably internal/cluster traffic only; confirm.
    builder.AddListeningPort(server_addr, grpc::InsecureServerCredentials());
    builder.RegisterService(&service);
    // Raise the send limit to allow large image payloads (128 MiB).
    builder.SetMaxSendMessageSize(128 * 1024 * 1024);
    std::unique_ptr<Server> server(builder.BuildAndStart());
    std::cout << "Server listening on " << server_addr << std::endl;
    server->Wait(); // blocks until shutdown
    return 0;
}

13
alg/fileprocess.h Normal file
View File

@ -0,0 +1,13 @@
#ifndef _FILEPROCESS_H_
#define _FILEPROCESS_H_
#include <stdint.h>
#include <string>
#include <vector>
typedef int (*handler_fn)(const std::string &input_path,
const std::vector<uint8_t> &input,
std::string &output_path,
std::vector<uint8_t> &output);
int run_server(const std::string &server_addr, handler_fn handle_image);
#endif

36
alg/http.cc Normal file
View File

@ -0,0 +1,36 @@
#include <string>
#include <iostream>
#include <stdlib.h>
#include "httplib.h"
#include "http.h"
using namespace std;
// Blocking HTTP server (cpp-httplib) exposing POST /roi: reads the multipart
// form field "file", runs `handle_file` on its bytes, and returns the
// handler's output with an image/jpeg content type. Listens on all
// interfaces; svr.listen blocks, so the function normally never returns.
int start_http_server(int port, http_handle_file handle_file)
{
    httplib::Server svr;
    svr.Post("/roi", [handle_file](const httplib::Request &req, httplib::Response &res) {
        auto f = req.get_file_value("file");
        vector<uint8_t> input(f.content.begin(), f.content.end());
        vector<uint8_t> output;
        if (!input.size()) {
            // Missing or empty upload -> client error.
            res.status = 400;
            res.set_content("file is missing\n", "text/plain");
        } else {
            int r = handle_file(input, output);
            if (r) {
                // Any non-zero handler result is reported as a client error.
                res.status = 400;
                res.set_content("failed to process file\n", "text/plain");
            } else {
                res.status = 200;
                // NOTE(review): assumes the handler's output bytes are JPEG —
                // confirm against the roi handler.
                res.set_content((char *)&output[0], output.size(), "image/jpeg");
            }
        }
    });
    cout << "starting server on port " << port << endl;
    svr.listen("0.0.0.0", port);
    return 0;
}

10
alg/http.h Normal file
View File

@ -0,0 +1,10 @@
#ifndef _HTTP_H_
#define _HTTP_H_
#include <string>
#include <vector>
typedef int (*http_handle_file)(const std::vector<uint8_t> &input,
std::vector<uint8_t> &output);
int start_http_server(int port, http_handle_file handle_file);
#endif

9464
alg/httplib.h Normal file

File diff suppressed because it is too large Load Diff

720
alg/libqr.cpp Normal file
View File

@ -0,0 +1,720 @@
#include <iostream>
#include <string>
#include "libqr.h"
#include "opencv2/objdetect.hpp"
#include "opencv2/wechat_qrcode.hpp"
#include "string_format.h"
using namespace std;
using namespace cv;
// Perspective-warp `in` so the four QR corners in `qr_points` map onto their
// axis-aligned bounding rectangle; the warped image is written to `out`.
// Returns the bounding-rectangle corners in TL, TR, BR, BL order, i.e. where
// the QR code lands in `out`.  Assumes qr_points holds exactly four points.
static
vector<Point> transform_image(Mat &in, vector<Point> qr_points, Mat &out)
{
Mat src = (Mat_<float>(4, 2) <<
qr_points[0].x, qr_points[0].y,
qr_points[1].x, qr_points[1].y,
qr_points[2].x, qr_points[2].y,
qr_points[3].x, qr_points[3].y
);
// Axis-aligned bounding box of the four (possibly skewed) corners.
int min_x = qr_points[0].x;
int min_y = qr_points[0].y;
int max_x = qr_points[0].x;
int max_y = qr_points[0].y;
for (auto p: qr_points) {
min_x = min(p.x, min_x);
min_y = min(p.y, min_y);
max_x = max(p.x, max_x);
max_y = max(p.y, max_y);
}
Mat dst = (Mat_<float>(4, 2) <<
min_x, min_y,
max_x, min_y,
max_x, max_y,
min_x, max_y);
Mat m = getPerspectiveTransform(src, dst);
warpPerspective(in, out, m, in.size());
vector<Point> ret;
ret.push_back(Point(min_x, min_y));
ret.push_back(Point(max_x, min_y));
ret.push_back(Point(max_x, max_y));
ret.push_back(Point(min_x, max_y));
return ret;
}
// Detect and decode the QR code in ps.preprocessed (the downscaled grayscale
// built by preprocess()), then crop the QR region out of the original image.
// On success fills: ps.qrcode (decoded text), ps.qr_points (corners scaled
// back into original-image coordinates), ps.qr_straighten (tight QR crop),
// ps.straighten (QR crop plus `margin_ratio` of margin on every side),
// ps.dot_area (top-left margin/2 square of the margin crop) and
// ps.dot_area_gray (that square, hist-equalized gray, resized to 64x64).
// When `warp` is set, the original is first perspective-corrected.
// Returns false with `err` set when: no QR is found, the margin around the
// QR would fall outside the image, or the QR is too small.
bool detect_qr(ProcessState &ps, float margin_ratio, bool warp, string &err)
{
#if WECHAT_QRCODE_USE_MODEL
auto wr = wechat_qrcode::WeChatQRCode(
"wechat_qrcode/detect.prototxt",
"wechat_qrcode/detect.caffemodel",
"wechat_qrcode/sr.prototxt",
"wechat_qrcode/sr.caffemodel");
#else
auto wr = wechat_qrcode::WeChatQRCode();
#endif
vector<Mat> qrs;
auto r = wr.detectAndDecode(ps.preprocessed, qrs);
if (!r.size()) {
err = "qr not detected";
return false;
}
// Only the first detected QR code is used.
ps.qrcode = r[0];
auto rect = qrs[0];
vector<Point> qr_points;
// Undo the preprocess() downscale so the corners refer to the original.
qr_points.push_back(Point(rect.at<float>(0, 0) / ps.scale, rect.at<float>(0, 1) / ps.scale));
qr_points.push_back(Point(rect.at<float>(1, 0) / ps.scale, rect.at<float>(1, 1) / ps.scale));
qr_points.push_back(Point(rect.at<float>(2, 0) / ps.scale, rect.at<float>(2, 1) / ps.scale));
qr_points.push_back(Point(rect.at<float>(3, 0) / ps.scale, rect.at<float>(3, 1) / ps.scale));
ps.qr_points = qr_points;
Mat warped;
vector<Point> warped_qr_points;
if (warp) {
warped_qr_points = transform_image(*ps.orig, qr_points, warped);
} else {
warped = *ps.orig;
warped_qr_points = qr_points;
}
// Bounding box of the (possibly warped) QR corners.
int min_x = warped_qr_points[0].x;
int min_y = warped_qr_points[0].y;
int max_x = min_x;
int max_y = min_y;
for (auto p: warped_qr_points) {
min_x = min(p.x, min_x);
min_y = min(p.y, min_y);
max_x = max(p.x, max_x);
max_y = max(p.y, max_y);
}
// The margin is proportional to the QR width and must fit in the image.
int margin = (max_x - min_x) * margin_ratio;
if (min_y < margin || min_x < margin || max_x + margin >= warped.cols || max_y + margin >= warped.rows) {
err = "qr margin too small";
return false;
}
// Reject QRs that are both small in pixels and small relative to the frame.
int qr_width = max_x - min_x;
int qr_height = max_y - min_y;
if (qr_width < 200 && qr_height < 200 && qr_width < ps.orig->cols * 0.5 && qr_height < ps.orig->rows * 0.5) {
printf("(%d, %d) in (%d, %d)\n", qr_width, qr_height, ps.orig->cols, ps.orig->rows);
err = "qr too small";
return false;
}
Rect qr_rect(min_x, min_y, max_x - min_x, max_y - min_y);
ps.qr_straighten = warped(qr_rect);
Rect qr_with_margin_rect(min_x - margin, min_y - margin,
max_x - min_x + margin * 2,
max_y - min_y + margin * 2);
ps.straighten = warped(qr_with_margin_rect);
Mat g;
cvtColor(ps.straighten, g, COLOR_BGR2GRAY);
equalizeHist(g, g);
// The "dot" marker area is the top-left quarter of the margin band.
Rect dot_rect;
dot_rect.x = 0;
dot_rect.y = 0;
dot_rect.width = margin / 2;
dot_rect.height = margin / 2;
ps.dot_area = ps.straighten(dot_rect);
Mat dot_area_gray = g(dot_rect);
resize(dot_area_gray, ps.dot_area_gray, Size(64, 64));
return true;
}
// Build ps.preprocessed: a grayscale copy of ps.orig downscaled so the longer
// side is at most 512 px; records the applied factor in ps.scale so detected
// coordinates can be mapped back.  Always returns true.
bool preprocess(ProcessState &ps)
{
Mat gray;
cvtColor(*ps.orig, gray, COLOR_BGR2GRAY);
ps.scale = 1.0;
const float size_cap = 512;
if (ps.orig->rows > size_cap) {
ps.scale = size_cap / ps.orig->rows;
}
if (ps.orig->cols > ps.orig->rows && ps.orig->cols > size_cap) {
ps.scale = size_cap / ps.orig->cols;
}
resize(gray, ps.preprocessed, Size(), ps.scale, ps.scale);
return true;
}
// Mean squared first-difference energy along each axis; a sharpness proxy.
struct EnergyGradient {
double x;
double y;
};
// Compute the horizontal/vertical gradient energy of `gray_img` by convolving
// with 1-pixel difference kernels, squaring, and averaging.
static
EnergyGradient energy_gradient(Mat &gray_img)
{
Mat smd_image_x, smd_image_y, G;
// Horizontal difference: p(x, y) - p(x + 1, y).
Mat kernel_x(3, 3, CV_32F, Scalar(0));
kernel_x.at<float>(1, 2) = -1.0;
kernel_x.at<float>(1, 1) = 1.0;
// Vertical difference: p(x, y) - p(x, y + 1).
Mat kernel_y(3, 3, CV_32F, Scalar(0));
kernel_y.at<float>(1, 1) = 1.0;
kernel_y.at<float>(2, 1) = -1.0;
filter2D(gray_img, smd_image_x, gray_img.depth(), kernel_x);
filter2D(gray_img, smd_image_y, gray_img.depth(), kernel_y);
multiply(smd_image_x, smd_image_x, smd_image_x);
multiply(smd_image_y, smd_image_y, smd_image_y);
EnergyGradient ret = { mean(smd_image_x)[0], mean(smd_image_y)[0], };
return ret;
}
// Sharpness test: the image is sharp when the gradient energy is high in both
// axes (measured as-is and rotated 45 degrees) and roughly isotropic (x/y
// energies within 15% of each other).  Returns true when sharp; otherwise
// returns false and reports the four energies in `err`.
static
bool check_blur_by_energy_gradient(Mat &gray, string &err)
{
const int thres = 85;
auto a = energy_gradient(gray);
float angle = 45;
auto m = getRotationMatrix2D(Point2f(gray.cols / 2, gray.rows / 2), angle, 1.0);
Mat rotated;
warpAffine(gray, rotated, m, gray.size());
auto b = energy_gradient(rotated);
auto diffa = fabs(a.x - a.y);
auto diffb = fabs(b.x - b.y);
auto diffa_percent = 100 * diffa / max(a.x, a.y);
auto diffb_percent = 100 * diffb / max(b.x, b.y);
bool ret =
((a.x > thres && a.y > thres) || (b.x > thres && b.y > thres)) &&
diffa_percent < 15 && diffb_percent < 15;
cout << "energy: "
+ to_string(a.x) + " "
+ to_string(a.y) + " "
+ to_string(b.x) + " "
+ to_string(b.y) << endl;
if (!ret) {
err = "energy: "
+ to_string(a.x) + " "
+ to_string(a.y) + " "
+ to_string(b.x) + " "
+ to_string(b.y);
}
return ret;
}
// Laplacian-variance sharpness metric, normalized by image area: blur the
// image slightly, take the Laplacian, and return var(|lap|) / (rows * cols).
// Higher means sharper.  Returns -1 with `err` set if meanStdDev produces an
// unexpected shape.
double laplacian(Mat &gray, string &err)
{
int ddepth = CV_16S;
Mat check, lap;
GaussianBlur(gray, check, Size(5, 5), 0, 0, BORDER_DEFAULT);
Laplacian(check, lap, ddepth, 3);
convertScaleAbs(lap, lap);
Mat mean, stddev;
meanStdDev(lap, mean, stddev);
if (stddev.cols * stddev.rows == 1) {
double area = gray.rows * gray.cols;
double sd = stddev.at<double>(0, 0);
double var = sd * sd;
return var / area;
}
err = "wrong shape of stddev result";
return -1;
}
// Blur gate using the Laplacian metric: records the score in ps.clarity and
// fails (with a descriptive `err`) when it is at or below ps.laplacian_thres.
static
bool check_blur_by_laplacian(ProcessState &ps, Mat &gray, string &err)
{
auto var = laplacian(gray, err);
if (var < 0) return false;
ps.clarity = var;
if (var <= ps.laplacian_thres) {
err = string_format("image (%d x %d) too blurry: %lf <= %lf",
gray.cols, gray.rows,
var, ps.laplacian_thres
);
return false;
}
return true;
}
// Blur-check dispatcher; the energy-gradient variant is currently disabled in
// favor of the Laplacian variant.
static
bool check_blur(ProcessState &ps, Mat &gray, string &err)
{
bool use_energy_gradient = false;
if (use_energy_gradient) {
return check_blur_by_energy_gradient(gray, err);
}
return check_blur_by_laplacian(ps, gray, err);
}
// Alternative pattern validation via connected components; currently compiled
// out (COUNT_COMPONENTS == 0).
#define COUNT_COMPONENTS 0
#if COUNT_COMPONENTS
// True when the binary image has more than 25 connected components larger
// than 5 pixels — a rough "is this a dot grid" check.
static bool is_valid_pattern(Mat &img)
{
Mat labels;
Mat stats;
Mat centroids;
connectedComponentsWithStats(img, labels, stats, centroids);
int valid = 0;
for (auto i = 0; i < stats.rows; i++) {
int area = stats.at<int>(i, CC_STAT_AREA);
if (area > 5) {
valid++;
}
}
return valid > 25;
}
#endif
// Score a binary image by the number of rows containing at least one set
// pixel.  When the dot grid is axis-aligned, dots collapse into few rows, so
// a LOWER score means better alignment.
static
int find_score(Mat &img)
{
int ret = 0;
for (int row = 0; row < img.rows; row++) {
int row_sum = 0;
for (int col = 0; col < img.cols; col++) {
auto p = img.at<bool>(row, col);
if (p) {
row_sum += 1;
}
}
if (row_sum) {
ret += 1;
}
}
return ret;
}
// Zero out the 8-connected component of non-zero pixels containing `p`,
// using an explicit stack (iterative flood fill).  Pixels may be pushed more
// than once but are cleared when popped, so the loop terminates.
static void clear_connected(Mat &bin, Point p)
{
vector<Point> q;
q.push_back(p);
while (q.size()) {
auto p = q[q.size() - 1];
q.pop_back();
bin.at<uint8_t>(p.y, p.x) = 0;
for (int i = -1; i <= 1; i++) {
for (int j = -1; j <= 1; j++) {
int nx = p.x + i;
int ny = p.y + j;
if (nx < 0 || nx >= bin.cols || ny < 0 || ny >= bin.rows) {
continue;
}
if (bin.at<bool>(ny, nx)) {
q.push_back(Point(nx, ny));
}
}
}
}
}
// Return one representative point per connected component of non-zero pixels.
// NOTE: destroys `bin` (components are erased as they are found) — callers
// pass a clone.
static
vector<Point> find_points(Mat bin)
{
vector<Point> ret;
for (int x = 0; x < bin.cols; x++) {
for (int y = 0; y < bin.rows; y++) {
auto p = bin.at<uint8_t>(y, x);
if (!p) continue;
auto point = Point(x, y);
ret.push_back(point);
clear_connected(bin, point);
}
}
return ret;
}
// Erode the binary dot image `bin` into `eroded` until its connected
// components are clearly separated: first erode while there are too few
// components, then keep eroding while the count stays above the minimum.
// Returns 0 on success, -1 (with `err` set) when no components are found.
static
int adaptive_erode(Mat &bin, Mat &eroded, string &err)
{
auto kernel = getStructuringElement(MORPH_ELLIPSE, Size(3, 3));
const int min_points = 25;
int max_erodes = 5;
printf("adaptiveThreshold\n");
eroded = bin.clone();
while (max_erodes-- > 0) {
// In this loop we erode a "full" image in order to get enough detached components
// NOTE(review): the loop erodes `eroded` but measures components on `bin`,
// which never changes — so this phase either exits immediately or erodes
// the full max_erodes times.  Looks like it should be
// find_points(eroded.clone()); confirm intent before changing.
auto points = find_points(bin.clone());
printf("points: %zu\n", points.size());
if (points.size() == 0) {
err = "cannot find enough points";
return -1;
}
if (points.size() > min_points) {
break;
}
erode(eroded, eroded, kernel);
}
while (max_erodes-- > 0) {
// In this loop we further erode a "lean" image in order to get clarity until it's too much
Mat next;
erode(eroded, next, kernel);
auto points = find_points(next.clone());
if (points.size() < min_points) {
break;
}
eroded = next;
}
return 0;
}
// Estimate the rotation angle of the dot grid in `gray`.
// The image is binarized (inverted adaptive threshold) and eroded until the
// dots separate, then rotated through 0..179 degrees; each angle is scored by
// the number of rows containing any set pixel (see find_score — fewer rows
// means the dots line up).  The score spread must exceed a threshold, and
// when `check_orthogonal` is set the angle 90 degrees away must score high
// too.  Returns the detected angle folded into [0, 45], or -1 on failure.
static
int emblem_detect_angle(Mat &gray, bool check_orthogonal, string &err)
{
Mat bin;
int min_score = gray.cols;
int max_score = 0;
int lowest_score_angle = -1;
adaptiveThreshold(gray, bin, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY_INV, 11, 2);
Mat inverted;
bitwise_not(bin, inverted);
const int MAX_ROT_ANGLE = 180;
int scores[MAX_ROT_ANGLE] = { 0 };
const int score_diff_thres = 5;
Mat eroded;
adaptive_erode(bin, eroded, err);
for (int angle = 0; angle < MAX_ROT_ANGLE; angle += 1) {
auto m = getRotationMatrix2D(Point2f(gray.cols / 2, gray.rows / 2), angle, 1.0);
Mat rotated;
warpAffine(eroded, rotated, m, gray.size());
int score = find_score(rotated);
scores[angle] = score;
if (score < min_score) {
lowest_score_angle = angle;
}
min_score = min(score, min_score);
max_score = max(max_score, score);
}
if (max_score - min_score > score_diff_thres) {
int orthogonal_angle = lowest_score_angle + 90;
// Bug fix: fold with >= 180 so the index stays inside scores[0..179];
// the previous `> 180` let orthogonal_angle == 180 (lowest angle 90)
// read one past the end of the array.
if (orthogonal_angle >= 180) {
orthogonal_angle -= 180;
}
int orthogonal_score = scores[orthogonal_angle];
printf("lowest_score_angle %d, min score %d, max score %d, orthogonal_angle %d, orthogonal score: %d\n",
lowest_score_angle, min_score, max_score, orthogonal_angle, orthogonal_score);
// Fold the angle into [0, 45]: the grid is symmetric every 90 degrees.
lowest_score_angle = lowest_score_angle > 90 ? lowest_score_angle - 90 : lowest_score_angle;
if (lowest_score_angle > 45)
lowest_score_angle = 90 - lowest_score_angle;
if (max_score - orthogonal_score > score_diff_thres || !check_orthogonal) {
return lowest_score_angle;
}
}
return -1;
}
// Top-level pipeline: find the QR code in `in`, crop the dot-marker area,
// check it is sharp enough, and detect the dot-grid rotation angle.
// On success sets `angle` and `qrcode` and returns true; on any failure
// returns false with a reason in `err`.  All exceptions are caught and
// reported via `err` so callers never see a throw.
bool emblem_dot_angle(ProcessState &ps, InputArray in, float &angle, string &qrcode, string &err)
{
try {
ps.orig = (Mat *)in.getObj();
preprocess(ps);
if (!detect_qr(ps, 0.20, true, err)) {
err = "detect_qr: " + err;
return false;
}
qrcode = ps.qrcode;
if (!check_blur(ps, ps.dot_area_gray, err)) {
return false;
}
int a = emblem_detect_angle(ps.dot_area_gray, false, err);
// NOTE(review): `a > 0` also rejects a legitimately detected angle of 0
// (emblem_detect_angle returns -1 for failure) — confirm whether 0 should
// be accepted.
if (a > 0) {
angle = a;
return true;
} else {
err = "cannot detect angle";
return false;
}
} catch (const std::exception &exc) {
std::cout << exc.what() << std::endl;
err = "exception";
return false;
} catch (...) {
err = "unknown error";
return false;
}
}
// Reduce a BGR image to the single channel (out of B, G, R, H, S, V) with the
// largest standard deviation, i.e. the most contrast — a better "grayscale"
// for the downstream thresholding than a plain luma conversion.
static
Mat adaptive_gray(Mat &img)
{
    Mat ret;
    Mat mean, stddev;
    Mat channels[3];
    Mat hsv_img;
    meanStdDev(img, mean, stddev);
    int bgr_max_std_channel = 0;
    // Bug fix: meanStdDev outputs CV_64F values — read them as double.
    // (The old at<float> reinterpreted the bytes and produced garbage.)
    double bgr_max_std = stddev.at<double>(0);
    for (int i = 1; i < 3; i++) {
        auto nv = stddev.at<double>(i);
        // Bug fix: compare against the best VALUE, not the channel index.
        if (nv > bgr_max_std) {
            bgr_max_std_channel = i;
            bgr_max_std = nv;
        }
    }
    cvtColor(img, hsv_img, COLOR_BGR2HSV);
    // Bug fix: was meanStdDev(img, hsv_img, stddev), which recomputed the BGR
    // stats and clobbered hsv_img with the mean vector.
    meanStdDev(hsv_img, mean, stddev);
    int hsv_max_std_channel = 0;
    double hsv_max_std = stddev.at<double>(0);
    for (int i = 1; i < 3; i++) {
        auto nv = stddev.at<double>(i);
        if (nv > hsv_max_std) {
            hsv_max_std_channel = i;
            hsv_max_std = nv;
        }
    }
    if (hsv_max_std > bgr_max_std) {
        split(hsv_img, channels);
        printf("using hsv channel %d\n", hsv_max_std_channel);
        ret = channels[hsv_max_std_channel];
    } else {
        split(img, channels);
        printf("using rgb channel %d\n", bgr_max_std_channel);
        ret = channels[bgr_max_std_channel];
    }
    return ret;
}
// True when cell (cell_x, cell_y) of the 7x7 grid lies on the background
// ring: columns 1 and 5 span rows 1..5, and the columns between them
// contribute only rows 1 and 5.
static
bool cell_in_bg(int cell_x, int cell_y)
{
    if (cell_x == 1 || cell_x == 5)
        return cell_y > 0 && cell_y < 6;
    if (cell_x >= 2 && cell_x <= 4)
        return cell_y == 1 || cell_y == 5;
    return false;
}
// Map pixel `p` of a w x h image onto the 7x7 cell grid and test whether the
// cell belongs to the background ring.
static
bool roi_in_bg(int w, int h, Point p)
{
    int cx = p.x * 7 / w;
    int cy = p.y * 7 / h;
    return cell_in_bg(cx, cy);
}
// Binarize `img` in place: pixels outside the background ring, or with
// intensity outside the trimmed range, become 0; the rest become 255.
// The range is [p_low + cap, p_high - cap] where p_low/p_high are the
// 20th/80th percentile intensities (cut = 20; the p05/p95 names predate that
// value) and cap trims a further margin_pct% of that span from each end.
static
void roi_mask(Mat &img, int margin_pct)
{
// Intensity histogram over the whole image.
int counts[256] = { 0 };
for (int i = 0; i < img.cols; i++) {
for (int j = 0; j < img.rows; j++) {
uint8_t p = img.at<uint8_t>(Point(i, j));
counts[p]++;
}
}
int cut = 20;
int seen = 0;
int total = img.cols * img.rows;
int p05, p95;
for (p05 = 0; seen < total * cut / 100 && p05 < 256; p05++) {
seen += counts[p05];
}
seen = 0;
for (p95 = 0; seen < total * (100 - cut) / 100 && p95 < 256; p95++) {
seen += counts[p95];
}
printf("p05: %d, p95: %d\n", p05, p95);
int cap = (p95 - p05) * margin_pct / 100;
int min_thres = p05 + cap;
int max_thres = p95 - cap;
for (int i = 0; i < img.cols; i++) {
for (int j = 0; j < img.rows; j++) {
auto pos = Point(i, j);
uint8_t p = img.at<uint8_t>(pos);
if (!roi_in_bg(img.cols, img.rows, pos)) {
img.at<uint8_t>(pos) = 0;
} else if (p < min_thres) {
img.at<uint8_t>(pos) = 0;
} else if (p > max_thres) {
img.at<uint8_t>(pos) = 0;
} else {
img.at<uint8_t>(pos) = 255;
}
}
}
}
// Compute per-cell features of a masked image: for each 7x7 grid cell on the
// background ring, the fraction of its pixels that are set.  Returns one
// float in [0, 1] per ring cell, in row-major cell order.
static
vector<float> roi_extract_features(Mat &img)
{
vector<int> ones(49, 0);
vector<int> zeroes(49, 0);
for (int i = 0; i < img.cols; i++) {
for (int j = 0; j < img.rows; j++) {
auto pos = Point(i, j);
int cell_x = pos.x * 7 / img.cols;
int cell_y = pos.y * 7 / img.rows;
int idx = cell_y * 7 + cell_x;
assert(idx < 49);
uint8_t p = img.at<uint8_t>(pos);
if (p) {
ones[idx]++;
} else {
zeroes[idx]++;
}
}
}
printf("ones:\n");
for (int i = 0; i < 49; i++) {
printf("%d ", ones[i]);
}
printf("\n");
vector<float> ret;
for (int i = 0; i < 49; i++) {
int cell_x = i % 7;
int cell_y = i / 7;
if (!cell_in_bg(cell_x, cell_y)) {
continue;
}
if (ones[i] || zeroes[i]) {
ret.push_back(ones[i] / (float)(ones[i] + zeroes[i]));
} else {
ret.push_back(0);
}
}
return ret;
}
// Arithmetic mean of `a`; an empty vector yields 0 instead of dividing by
// zero.
static
float mean(vector<float> &a)
{
    if (a.empty())
        return 0;
    float total = 0;
    for (size_t i = 0; i < a.size(); i++)
        total += a[i];
    return total / a.size();
}
// Unnormalized covariance of `a` and `b`: the sum of products of deviations
// from the respective means.  Mismatched lengths yield 0.
static
float covariance(vector<float> &a, vector<float> &b)
{
    const float mean_a = mean(a);
    const float mean_b = mean(b);
    if (a.size() != b.size())
        return 0;
    float acc = 0;
    for (size_t i = 0; i < a.size(); i++)
        acc += (a[i] - mean_a) * (b[i] - mean_b);
    return acc;
}
// True when `p` lies strictly inside `a` (note: x == 0 / y == 0 are excluded,
// not just out-of-range coordinates).
static inline
bool valid_point(Mat &a, Point p)
{
return p.x > 0 && p.x < a.cols && p.y > 0 && p.y < a.rows;
}
// Exact pixel equality between a[pa] and b[pb]; out-of-bounds points never
// match.  ("Fuzzy" refers to how fuzzy_pixel_cmp calls it over a
// neighborhood, not to the comparison itself.)
static inline
bool fuzzy_pixel_match(Mat &a, Point pa, Mat &b, Point pb)
{
if (!valid_point(a, pa) || !valid_point(b, pb)) return false;
return a.at<uint8_t>(pa) == b.at<uint8_t>(pb);
}
// Count pixels of `a` that match any pixel of `b` within a 1-pixel
// neighborhood.  Pixels outside the background ring always count as matches,
// so only ring pixels influence the comparison.  Requires equal sizes.
static
int fuzzy_pixel_cmp(Mat &b, Mat &a)
{
int ret = 0;
int w = a.cols;
int h = a.rows;
assert(a.cols == b.cols);
assert(a.rows == b.rows);
for (int i = 0; i < w; i++) {
for (int j = 0; j < h; j++) {
Point p(i, j);
if (!roi_in_bg(w, h, p)) {
ret++;
continue;
}
bool same = false;
int fuzziness = 1;
for (int ii = -fuzziness; ii <= fuzziness; ii++) {
for (int jj = -fuzziness; jj <= fuzziness; jj++) {
if (fuzzy_pixel_match(a, p, b, Point(i + ii, j + jj))) {
same = true;
goto out;
}
}
}
out:
ret += same ? 1 : 0;
}
}
return ret;
}
// Score how similar a captured ROI is to the reference ("standard") ROI.
// Both images are reduced to their highest-contrast channel, masked to the
// background ring, and compared two ways: a fuzzy pixel match ratio and the
// covariance of per-cell fill features; the returned score is their product.
// NOTE(review): the `alg` parameter is currently unused — both strategies are
// always combined; confirm whether callers expect it to select one.
double emblem_roi_similarity(SimilarityAlg alg, InputArray std_in, InputArray frame_roi_in, string &err)
{
// Local `std` shadows namespace std in this function body.
Mat stdm = *(Mat *)std_in.getObj();
Mat frame_roi = *(Mat *)frame_roi_in.getObj();
err = "";
Mat frame_gray = adaptive_gray(frame_roi);
Mat std_gray = adaptive_gray(stdm);
resize(frame_gray, frame_gray, std_gray.size());
double ret = 0;
Mat frame = frame_gray.clone();
Mat std = std_gray.clone();
roi_mask(frame, 20);
roi_mask(std, 30);
double same = fuzzy_pixel_cmp(frame, std);
double total = frame.rows * frame.cols;
double sim = same / total;
printf("same: %lf, total: %lf, sim: %lf\n", same, total, sim);
auto std_feature = roi_extract_features(std);
auto frame_feature = roi_extract_features(frame);
printf("\nstd:");
for (auto x: std_feature) {
printf("%.2lf ", x * 100);
}
printf("\nfrm:");
for (auto x: frame_feature) {
printf("%.2lf ", x * 100);
}
printf("\n");
double cov = covariance(std_feature, frame_feature);
printf("cov: %lf\n", cov);
double t = cov * sim;
ret = ret > t ? ret : t;
return ret;
}

48
alg/libqr.h Normal file
View File

@ -0,0 +1,48 @@
#ifndef LIBQR_H
#define LIBQR_H
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/core.hpp"
#include "opencv2/calib3d.hpp"
#include <string>
#include <vector>
using namespace cv;
using namespace std;
typedef Mat CvImg;
// Accumulated state of one image as it flows through the QR/dot pipeline
// (preprocess -> detect_qr -> check_blur -> emblem_detect_angle).
struct ProcessState {
CvImg *orig;                   // input image; not owned
std::vector<Point> qr_points;  // QR corners in original coordinates
float scale;                   // preprocess downscale factor (<= 1.0)
Mat transform;
CvImg preprocessed;            // downscaled grayscale used for detection
CvImg straighten;              // QR crop including margin
Rect qr_rect_in_straighten;
CvImg qr_straighten;           // tight QR crop
CvImg dot_area;                // dot-marker corner of the margin band
CvImg dot_area_gray;           // 64x64 equalized gray dot area
string qrcode = "";            // decoded QR payload
double clarity;                // laplacian sharpness score
float laplacian_thres = 0.1;   // minimum acceptable clarity
};
bool preprocess(ProcessState &ps);
bool emblem_dot_angle(ProcessState &ps, cv::InputArray in, float &angle, std::string &qrcode, std::string &err);
bool detect_qr(ProcessState &ps, float margin_ratio, bool warp, string &err);
// Similarity strategies; see emblem_roi_similarity.
enum SimilarityAlg {
CellWeight,
FuzzyPixelCmp,
};
double emblem_roi_similarity(SimilarityAlg alg, InputArray a, InputArray b, string &err);
double laplacian(Mat &gray, string &err);
// Debug helper: display an image and wait for a key press.
static inline void showimg_(const char *title, Mat &img) {
imshow(title, img);
waitKey(0);
}
#define show(img) showimg_(#img, img)
#endif

169
alg/mq_worker.cpp Normal file
View File

@ -0,0 +1,169 @@
#include <thread>
#include <iostream>
#include <json/json.h>
#include "base64.h"
#include "mq_worker.h"
#include <pulsar/Client.h>
using namespace pulsar;
using namespace std;
// One unit of work received from the queue: where the file lives, its bytes,
// and the topic the result should be published to.
struct MqMessage {
string space;
string path;
string result_topic;
vector<uint8_t> bytes;
};
// Reply published to result_topic after processing one message.
struct Response {
string path;
bool succeeded;
string result_path;
vector<uint8_t> output;
size_t size;
string error;
};
// Parse a JSON work message ({space, path, result_topic, data_b64}).
// NOTE(review): parse errors are ignored — a malformed payload yields a
// message with empty fields; confirm that is acceptable upstream.
static
MqMessage parse_message(const std::string& str) {
Json::CharReaderBuilder builder;
Json::Value root;
std::istringstream jsonStream(str);
Json::parseFromStream(builder, jsonStream, &root, nullptr);
MqMessage msg;
msg.space = root["space"].asString();
msg.path = root["path"].asString();
msg.result_topic = root["result_topic"].asString();
msg.bytes = base64_decode(root["data_b64"].asString());
return msg;
}
// Serialize a Response to JSON; the output bytes go into
// output_files[0].data_b64, and "error" is only present on failure.
static
std::string response_to_json(const Response& msg) {
Json::Value root;
root["path"] = msg.path;
root["succeeded"] = msg.succeeded;
root["size"] = msg.size;
if (msg.error.size()) {
root["error"] = msg.error;
}
Json::Value ofs;
Json::Value of;
of["path"] = msg.result_path;
of["data_b64"] = base64_encode(msg.output.data(), msg.output.size());
ofs.append(of);
root["output_files"] = ofs;
Json::StreamWriterBuilder builder;
std::string str = Json::writeString(builder, root);
return str;
}
// Worker main loop: subscribe to `topic` (shared subscription `worker_name`,
// starting from the earliest message), process each message with
// `handle_image`, and publish the JSON reply to the message's result_topic.
// A producer is (re)created only when result_topic changes between messages.
// Messages are acked only after the reply is sent; a failed send is nacked
// for redelivery.  Runs forever; returns -1 only on setup errors.
int mq_worker(const char *topic, const char *worker_name, handler_fn handle_image) {
Client client("pulsar://localhost:6650");
Producer producer;
string prev_producer_topic;
Consumer consumer;
ConsumerConfiguration config;
config.setConsumerType(ConsumerShared);
config.setSubscriptionInitialPosition(InitialPositionEarliest);
Result result = client.subscribe(topic, worker_name, config, consumer);
if (result != ResultOk) {
cout << "Failed to subscribe: " << result << endl;
return -1;
}
Message mq_msg;
int processed = 0;
int failed = 0;
while (1) {
// NOTE(review): the receive() Result is not checked — confirm receive
// cannot return with mq_msg unset.
consumer.receive(mq_msg);
auto payload = mq_msg.getDataAsString();
auto msg = parse_message(payload);
if (processed % 1000 == 0) {
cout << processed << ": " << msg.path << " " << msg.bytes.size() << endl;
}
Response resp;
resp.path = msg.path;
resp.size = msg.bytes.size();
resp.succeeded = true;
int r = handle_image(msg.path,
msg.bytes,
resp.result_path,
resp.output);
if (r) {
resp.succeeded = false;
resp.error = string("error ") + to_string(r);
}
auto reply = response_to_json(resp);
if (prev_producer_topic != msg.result_topic) {
Result result = client.createProducer(msg.result_topic, producer);
if (result != ResultOk) {
cerr << "Error creating producer: " << result << endl;
return -1;
}
prev_producer_topic = msg.result_topic;
}
Message result_msg = MessageBuilder().setContent(reply).build();
Result result = producer.send(result_msg);
if (result != ResultOk) {
cerr << "Error sending reply: " << result << endl;
consumer.negativeAcknowledge(mq_msg);
failed++;
} else {
processed++;
consumer.acknowledge(mq_msg);
}
if (processed % 1000 == 0) {
cout << "processed: " << processed << ", failed: " << failed << endl;
}
}
client.close();
return 0;
}
// Dead code kept for reference: a standalone Pulsar producer smoke test.
#if 0
static
int test_pulsar_worker() {
Client client("pulsar://localhost:6650");
Producer producer;
Result result = client.createProducer("persistent://public/default/my-topic", producer);
if (result != ResultOk) {
std::cout << "Error creating producer: " << result << std::endl;
return -1;
}
// Send 100 messages synchronously
int ctr = 0;
while (ctr < 100) {
std::string content = "msg" + std::to_string(ctr);
Message msg = MessageBuilder().setContent(content).setProperty("x", "1").build();
Result result = producer.send(msg);
if (result != ResultOk) {
std::cout << "The message " << content << " could not be sent, received code: " << result << std::endl;
} else {
std::cout << "The message " << content << " sent successfully" << std::endl;
}
std::this_thread::sleep_for(std::chrono::milliseconds(100));
ctr++;
}
std::cout << "Finished producing synchronously!" << std::endl;
client.close();
return 0;
}
#endif

12
alg/mq_worker.h Normal file
View File

@ -0,0 +1,12 @@
#ifndef _MQ_WORKER_H_
#define _MQ_WORKER_H_
#include <stdint.h>
#include <vector>
// Callback invoked for each message: input path/bytes in, output path/bytes
// out.  Returns 0 on success, non-zero on failure (reported back on the
// result topic).  Mirrors handler_fn in fileprocess.h.
typedef int (*handler_fn)(const std::string &input_path,
const std::vector<uint8_t> &input,
std::string &output_path,
std::vector<uint8_t> &output);
// Blocking Pulsar worker loop; subscribes to `topic` under subscription name
// `worker_name` and processes messages with `handle_image`.
int mq_worker(const char *topic, const char *worker_name, handler_fn handle_image);
#endif

1
alg/post.wx.js Normal file
View File

@ -0,0 +1 @@
// Emscripten post-js snippet: export the generated Module object as a
// CommonJS module for the WeChat mini-program runtime.
module.exports = Module;

15
alg/pre.wx.js Normal file
View File

@ -0,0 +1,15 @@
// Emscripten pre-js snippet: shim browser globals for the WeChat
// mini-program runtime before the generated module code runs.
var window = {};
var WA = WXWebAssembly;
var WebAssembly = WA;
// Bug fix: this was misspelled "RuntimeErrror", which left
// WebAssembly.RuntimeError undefined; WXWebAssembly does not provide it, so
// alias it to Error for the generated code's exception checks.
WebAssembly.RuntimeError = Error;
var performance = {
  now: Date.now,
};
// Load the brotli-compressed wasm bundled with the mini-program instead of
// letting emscripten fetch it.
Module['instantiateWasm'] = (info, receiveInstance) => {
  console.log("loading wasm...", info);
  WebAssembly.instantiate("assets/qrtool.wx.wasm.br", info).then((result) => {
    console.log("result:", result);
    var inst = result['instance'];
    receiveInstance(inst);
  });
}

107
alg/qr.ipynb Normal file

File diff suppressed because one or more lines are too long

646
alg/qrtool.cpp Normal file
View File

@ -0,0 +1,646 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
#include <iostream>
#include <vector>
#include <algorithm>
#include <iostream>
#include <fstream>
#include <chrono>
#include <filesystem>
#include <sys/stat.h>
#include <sys/types.h>
#include <stdlib.h>
#include <unistd.h>
#include "opencv2/objdetect.hpp"
#include "mq_worker.h"
#if ENABLE_GRPC
#include "fileprocess.h"
#endif
#include "http.h"
#include "libqr.h"
// CLI `detect`: decode a QR with OpenCV's stock QRCodeDetector and print the
// payload and corner matrix dimensions.
// NOTE(review): only row 0 of `points` is printed — confirm whether all four
// corners were intended.
static
int detect_cmd(char **argv, int argc)
{
char *file = argv[0];
auto orig = imread(file);
QRCodeDetector detector;
Mat points;
Mat straight;
auto r = detector.detectAndDecode(orig, points, straight);
printf("r: %s\n", r.c_str());
printf("points: %d %d\n", points.rows, points.cols);
for (int i = 0; i < points.cols; i++) {
printf("%f ", points.at<float>(0, i));
}
printf("\n");
return 0;
}
// CLI `angle`: run the full dot-angle pipeline on one file and print the
// detected angle and QR payload.  Returns 1 on failure.
static
int angle_cmd(char **argv, int argc)
{
char *file = argv[0];
printf("file: %s\n", file);
Mat orig = imread(file);
string qrcode, err;
float angle;
ProcessState ps;
auto r = emblem_dot_angle(ps, orig, angle, qrcode, err);
if (!r) {
// Note: r is a bool, so this prints "0:<err>".
cerr << r << ":" << err << endl;
return 1;
}
printf("angle: %.1f\n", angle);
printf("qrcode: %s\n", qrcode.c_str());
return 0;
}
// CLI `dot`: same as `angle` but also saves the cropped dot area next to the
// input as <file>.dot.jpg.
static
int dot_cmd(char **argv, int argc)
{
ProcessState ps;
char *file = argv[0];
printf("file: %s\n", file);
Mat orig = imread(file);
string qrcode, err;
float angle;
auto r = emblem_dot_angle(ps, orig, angle, qrcode, err);
if (!r) {
cerr << r << ":" << err << endl;
return 1;
}
string outfile = string(file) + ".dot.jpg";
printf("angle: %.1f\n", angle);
printf("qrcode: %s\n", qrcode.c_str());
printf("saving dot file: %s\n", outfile.c_str());
imwrite(outfile, ps.dot_area);
return 0;
}
// CLI `clarity`: print the laplacian sharpness score of one image.
static
int clarity_cmd(char **argv, int argc)
{
string err;
char *file = argv[0];
printf("file: %s\n", file);
Mat orig = imread(file);
Mat gray;
cvtColor(orig, gray, COLOR_BGR2GRAY);
auto c = laplacian(gray, err);
printf("clarity: %lf\n", c);
return 0;
}
// CLI `rectify`: crop the QR (with 20% margin, no perspective warp) and save
// it as <file>.qr.jpg.  Returns 1 on detection failure.
static
int rectify_cmd(char **argv, int argc)
{
char *file = argv[0];
string err;
ProcessState ps;
Mat orig = imread(file);
ps.orig = &orig;
preprocess(ps);
if (!detect_qr(ps, 0.20, false, err)) {
cerr << err << endl;
return 1;
}
string outfile = string(file) + ".qr.jpg";
imwrite(outfile, ps.straighten);
return 0;
}
// CLI `topleft`: warp-rectify the QR (2% margin), keep the top-left quarter
// of the crop, and save it as <file>.topleft.jpg.
static
int topleft_cmd(char **argv, int argc)
{
char *file = argv[0];
string err;
ProcessState ps;
Mat orig = imread(file);
ps.orig = &orig;
preprocess(ps);
if (!detect_qr(ps, 0.02, true, err)) {
cerr << err << endl;
return 1;
}
string outfile = string(file) + ".topleft.jpg";
Mat &base = ps.straighten;
auto crop = Rect(0, 0, base.cols / 2, base.rows / 2);
Mat result = base(crop);
imwrite(outfile, result);
return 0;
}
// Scan down the main diagonal of binary image `bin` for the first run of
// `npoints` consecutive black (0) pixels; that pixel seeds the ROI flood
// fill.  Returns 0 with `p` set on success, -1 when no black run exists in
// the first third of the diagonal.
static
int find_roi_start_point(Mat &bin, Point &p)
{
    int npoints = 4;
    for (int i = 0; i < bin.cols / 3; i++) {
        // Robustness fix: the accumulator was a uchar, which can wrap modulo
        // 256 (e.g. two 128-valued pixels sum to 0) and fake an all-black
        // run on non-strictly-binary input.  An int cannot wrap here.
        int sum = 0;
        for (int j = 0; j < npoints; j++) {
            int v = i + j;
            sum += bin.at<uchar>(v, v);
        }
        if (sum == 0) {
            p = Point(i, i);
            return 0;
        }
    }
    cerr << "find_roi_start_point" << endl;
    return -1;
}
// Count black (0) pixels per row (count_rows == true) or per column of the
// top-left size x size window of `bin`; ret[i] is the count for row/column i.
// NOTE(review): indexes up to (size-1, size-1) — assumes `bin` is at least
// size x size in both dimensions; confirm callers clamp `size`.
static
vector<int> count_black(Mat &bin, bool count_rows, int size)
{
vector<int> ret;
for (int i = 0; i < size; i++) {
int count = 0;
for (int j = 0; j < size; j++) {
int x = count_rows ? j : i;
int y = count_rows ? i : j;
if (bin.at<uchar>(y, x) == 0) count++;
}
ret.push_back(count);
}
return ret;
}
// Find the first index where three consecutive entries all reach 50% of the
// maximum — the start of the first "black" run in a row/column profile.
// Returns -1 when no such run exists (or the input is too short).
static int find_start_of_first_black_range(const vector<int> &data)
{
    // Bug fix: for size < 3, `data.size() - 3` wraps around (size_t), letting
    // the loop index far past the end; max_element on an empty vector is
    // undefined too.  Bail out early instead.
    if (data.size() < 3)
        return -1;
    size_t i = 0;
    int m = *std::max_element(data.begin(), data.end());
    int thres = m * 50 / 100;
    while (i < data.size() - 3) {
        if (data[i] >= thres && data[i + 1] >= thres && data[i + 2] >= thres) {
            break;
        }
        i++;
    }
    if (i >= data.size() - 3) return -1;
    return i;
}
// Find the index just past the first "black" run: locate the first window of
// three consecutive entries at/above 50% of the maximum, then advance to the
// first window where any entry drops below.  Returns -1 when the run's start
// or end cannot be found (or the input is too short).
static int find_end_of_first_black_range(const vector<int> &data)
{
    // Bug fix: same size_t underflow as find_start_of_first_black_range —
    // for size < 3, `data.size() - 3` wraps and the loops read out of
    // bounds; max_element on an empty vector is undefined.
    if (data.size() < 3)
        return -1;
    size_t i = 0;
    int m = *std::max_element(data.begin(), data.end());
    int thres = m * 50 / 100;
    while (i < data.size() - 3) {
        if (data[i] >= thres && data[i + 1] >= thres && data[i + 2] >= thres) {
            break;
        }
        i++;
    }
    if (i >= data.size() - 3) return -1;
    while (i < data.size() - 3) {
        if (data[i] < thres || data[i + 1] < thres || data[i + 2] < thres) {
            break;
        }
        i++;
    }
    if (i >= data.size() - 3) return -1;
    return i;
}
// Locate the ROI square inside binary image `bin`: flood-fill the black
// region reachable from `start` to get a rough extent, then refine the edges
// from per-row/column black-pixel profiles.  With `inner` set the rectangle
// excludes the black border (ends of the first black ranges); otherwise it
// includes it (starts of the ranges).  Validates that the region is roughly
// sensible in size.  Returns 0 and sets `rect`, or -1 with `err` set.
static
int find_roi_rect(Mat &bin, Point &start, Rect &rect, bool inner, string &err)
{
Mat visited;
bin.copyTo(visited);
vector<Point> q;
q.push_back(start);
int min_x = bin.rows, min_y = bin.rows, max_x = 0, max_y = 0;
int orig_size = max(bin.rows, bin.cols);
// Iterative 8-connected flood fill of black (0) pixels, tracking the
// bounding box; the fill is capped at 20000 queued points as a sanity limit.
while (q.size()) {
Point p = q.back();
q.pop_back();
visited.at<uchar>(p.y, p.x) = 255;
min_x = min(min_x, p.x);
min_y = min(min_y, p.y);
max_x = max(max_x, p.x);
max_y = max(max_y, p.y);
for (int xoff = -1; xoff <= 1; xoff++) {
for (int yoff = -1; yoff <= 1; yoff++) {
int x = p.x + xoff;
int y = p.y + yoff;
if (x >= 0 && x < visited.cols && y >= 0 && y < visited.rows) {
auto v = visited.at<uchar>(y, x);
if (v == 0) {
if (q.size() >= 20000) {
err = string("roi detected range too large: ") + to_string(q.size());
return -1;
}
q.push_back(Point(x, y));
visited.at<uchar>(y, x) = 255;
}
}
}
}
}
if (max_x - min_x < 50 || max_y - min_y < 50) {
err = "detected roi outer region too small";
return -1;
}
// Refine the edges from black-pixel histograms over the flood-fill extent.
auto size = std::max(max_x, max_y);
auto row_sums = count_black(bin, true, size);
auto col_sums = count_black(bin, false, size);
min_x = inner ? find_end_of_first_black_range(col_sums) : find_start_of_first_black_range(col_sums);
min_y = inner ? find_end_of_first_black_range(row_sums) : find_start_of_first_black_range(row_sums);
if (min_x < 0 || min_y < 0) {
err = "min_x or min_y is negative";
return -1;
}
// find the max values, similarly
std::reverse(col_sums.begin(), col_sums.end());
std::reverse(row_sums.begin(), row_sums.end());
max_x = size - (inner ? find_end_of_first_black_range(col_sums) : find_start_of_first_black_range(col_sums));
max_y = size - (inner ? find_end_of_first_black_range(row_sums) : find_start_of_first_black_range(row_sums));
if (max_x < 0 || max_y < 0) return -1;
if (max_x - min_x < 50 || max_y - min_y < 50) {
err = "detected roi region too small";
return -1;
}
// Force a square whose side is the mean of width and height, and require it
// to be between 1/5 and 3/5 of the original image size.
size = (max_x - min_x + max_y - min_y) / 2;
if (size < orig_size / 5 || size > orig_size * 3 / 5) {
err = "size of found region is out of valid range";
return -1;
}
rect = Rect(min_x + 1, min_y + 1, size, size);
return 0;
}
// Binarize `orig` for ROI detection: grayscale, contrast boost (x2), median
// blur to kill speckle, then a fixed 128 threshold.
// NOTE(review): `filtered`, `start` and `roi_rect` are unused leftovers here.
static void get_bin(Mat &orig, Mat &out)
{
Mat gray;
Mat filtered;
Point start;
Rect roi_rect;
cvtColor(orig, gray, COLOR_BGR2GRAY);
convertScaleAbs(gray, gray, 2);
// bilateralFilter(gray, filtered, 9, 150, 150, BORDER_DEFAULT);
medianBlur(gray, gray, 9);
threshold(gray, out, 128, 255, THRESH_BINARY);
}
// Find the ROI square in the top-left quarter of a straightened QR image:
// binarize, find a seed point on the diagonal, then locate the rectangle.
// Returns 0 and sets `roi` (a view into qr_straighten), or non-zero with
// `err` set.
static
int find_roi(Mat &qr_straighten, Mat &roi, bool inner, string &err)
{
Mat bin;
Rect topleft_r(0, 0, qr_straighten.cols / 2, qr_straighten.rows / 2);
Mat topleft = qr_straighten(topleft_r);
get_bin(topleft, bin);
Point start;
Rect roi_rect;
auto r = find_roi_start_point(bin, start);
if (r) {
err = "failed to find roi start point";
return r;
}
r = find_roi_rect(bin, start, roi_rect, inner, err);
if (r) {
return r;
}
roi = qr_straighten(roi_rect);
return 0;
}
// Load `file` and extract its ROI.  With `warp` the image is first run
// through QR detection/rectification (2% margin); otherwise it is assumed to
// already be a straightened crop.  On success writes the ROI to *roi_out
// (when non-NULL).  Returns 0 on success, non-zero with `err` set otherwise.
int roi_process_one(const char *file, bool inner, string &err, bool warp, Mat *roi_out)
{
Mat roi;
Mat orig = imread(file, IMREAD_COLOR);
Mat qr_with_margin;
if (warp) {
ProcessState ps;
ps.orig = &orig;
preprocess(ps);
if (!detect_qr(ps, 0.02, true, err)) {
cerr << err << ":" << file << endl;
return 1;
}
qr_with_margin = ps.straighten;
} else {
qr_with_margin = orig;
}
if (qr_with_margin.cols <= 0 || qr_with_margin.rows <= 0) return -1;
auto r = find_roi(qr_with_margin, roi, inner, err);
if (r) return r;
if (roi_out) {
*roi_out = roi;
}
return 0;
}
// True iff `path` refers to a directory (symlinks are not followed).
// Regular files, other file types, and stat failures all yield false; a
// failed lstat is reported via perror.
static bool is_dir(char *path)
{
    struct stat st;
    if (lstat(path, &st) != 0) {
        perror("Error in lstat");
        return false;
    }
    if (S_ISDIR(st.st_mode))
        return true;
    return false;
}
// Write `roi` next to its source image as <orig_file>.roi.jpg.
static void save_roi(string orig_file, Mat &roi)
{
string outfile = orig_file + ".roi.jpg";
cout << "save: " << outfile << endl;
imwrite(outfile, roi);
}
// CLI `frame-roi`: extract the ROI from a raw camera frame (QR rectification
// enabled) and save it next to the input.  Returns non-zero on failure.
static
int frame_roi_cmd(char **argv, int argc)
{
char *file = argv[0];
string err;
int ret = 0;
Mat roi;
cout << "frame roi processing: " << file << endl;
ret = roi_process_one(file, false, err, true, &roi);
if (ret) {
cerr << "failed to process: " << file << ":" << err <<endl;
return ret;
}
save_roi(file, roi);
return ret;
}
// CLI `roi`: extract the ROI from a single pre-straightened image, or from
// every file in a directory, saving each result next to its input as
// <name>.roi.jpg.  Returns non-zero if any file fails.
static
int roi_cmd(char **argv, int argc)
{
    char *file = argv[0];
    string err;
    int ret = 0;
    cout << "roi processing: " << file << endl;
    if (is_dir(file)) {
        for (auto const& dir_entry : filesystem::directory_iterator{file}) {
            auto path = dir_entry.path();
            Mat roi;
            if (roi_process_one(path.c_str(), false, err, false, &roi) != 0) {
                cerr << "failed: " << path << ":" << err << endl;
                ret = 1;
                // Bug fix: don't fall through and save an empty ROI for a
                // file that failed to process.
                continue;
            }
            // Bug fix: was save_roi(file, roi), which wrote every entry's
            // ROI to "<dir>.roi.jpg" instead of next to each input file.
            save_roi(path.string(), roi);
        }
    } else {
        Mat roi;
        ret = roi_process_one(file, false, err, false, &roi);
        if (ret) {
            cerr << "failed to process: " << file << ":" << err <<endl;
        } else {
            save_roi(file, roi);
        }
    }
    return ret;
}
// CLI `roi-bench`: run ROI extraction over every file in a directory and
// print the throughput (successfully processed files per second).
static
int roi_bench_cmd(char **argv, int argc)
{
char *file = argv[0];
int n = 0;
string err;
auto begin = chrono::system_clock::now();
for (auto const& dir_entry : filesystem::directory_iterator{file}) {
auto path = dir_entry.path();
if (roi_process_one(path.c_str(), false, err, false, NULL) == 0) {
n += 1;
}
}
auto end = chrono::system_clock::now();
std::chrono::duration<float> duration = end - begin;
float seconds = duration.count();
printf("qps: %.1f\n", n / seconds);
return 0;
}
#if USE_PULSAR
static vector<string> split_path(const string &path)
{
vector<string> ret;
string cur = "";
for (auto x: path) {
if (x == '/') {
if (cur.size()) {
ret.push_back(cur);
cur = "";
}
} else {
cur += x;
}
}
if (cur.size()) {
ret.push_back(cur);
cur = "";
}
return ret;
}
// Join components into an absolute path: {"a","b"} -> "/a/b".
// An empty component list yields the root path "/".
static string join_path(const vector<string> fs)
{
    if (fs.empty())
        return "/";
    string joined;
    for (const auto &component : fs) {
        joined += "/";
        joined += component;
    }
    return joined;
}
// Map an input object path to its ROI output path by replacing the 4th
// path component with "roi", e.g. /emblem/batches/<b>/import/f.jpg ->
// /emblem/batches/<b>/roi/f.jpg (mirrors make_roi_path in alg/worker.py).
static string get_output_path(const string &path)
{
    auto ret = split_path(path);
    // Bug fix: the original indexed ret[3] unconditionally, which is
    // undefined behaviour for paths with fewer than 4 components; such
    // paths are now returned normalized but otherwise unchanged.
    if (ret.size() > 3)
        ret[3] = "roi";
    return join_path(ret);
}
// Message-queue worker callback: decode one image payload, extract its ROI,
// and produce the JPEG-encoded ROI plus its destination object path.
// Returns 0 on success, otherwise find_roi's error code (output untouched).
static
int roi_worker_handle_image(const string &input_path,
                            const vector<uint8_t> &input,
                            string &output_path,
                            vector<uint8_t> &output)
{
    Mat roi;
    // IMREAD_COLOR: force 3-channel BGR regardless of source format.
    Mat orig = imdecode(input, IMREAD_COLOR);
    // NOTE(review): the two boolean flags differ from the http handler's
    // find_roi(orig, roi, false, err) call -- confirm this overload's
    // semantics against find_roi's declaration.
    auto r = find_roi(orig, roi, true, true);
    if (r) return r;
    imencode(".jpg", roi, output);
    output_path = get_output_path(input_path);
    return 0;
}
// No-op variant of the worker callback, used for queue benchmarking: it
// ignores the image bytes and emits a fixed 3-byte "foo" payload while
// still computing the real output path.
static
int roi_worker_handle_image_nop(const string &input_path,
                                const vector<uint8_t> &input,
                                string &output_path,
                                vector<uint8_t> &output)
{
    static const uint8_t placeholder[] = {'f', 'o', 'o'};
    output.insert(output.end(), placeholder, placeholder + sizeof(placeholder));
    output_path = get_output_path(input_path);
    return 0;
}
// Run a pulsar ROI worker consuming from `topic`.
static
int roi_worker_cmd(char *topic)
{
    // Bug fix: the randomized worker name was built and then discarded --
    // every worker registered as the literal "roi-worker" (the Python
    // worker uses a unique per-process name; see alg/worker.py).
    // NOTE(review): rand() is never seeded here, so the suffix repeats
    // across runs; consider srand()/a uuid if uniqueness matters.
    string worker_name = "roi-worker-";
    worker_name += to_string(rand());
    return mq_worker(topic, worker_name.c_str(), roi_worker_handle_image);
}
// Run a no-op pulsar worker (queue throughput testing) on `topic`.
static
int roi_worker_nop_cmd(char *topic)
{
    // Bug fix: as in roi_worker_cmd, the generated unique name was
    // discarded and the literal "roi-worker" passed instead.
    string worker_name = "roi-worker-";
    worker_name += to_string(rand());
    return mq_worker(topic, worker_name.c_str(), roi_worker_handle_image_nop);
}
#endif
#if ENABLE_GRPC
// Serve ROI extraction over gRPC on `addr`, reusing the worker image
// handler. run_server is defined elsewhere; presumably blocks until
// shutdown and its return value is the process exit code.
static
int grpc_server_cmd(char *addr)
{
    return run_server(addr, roi_worker_handle_image);
}
#endif
static
int http_server_handle_image(const vector<uint8_t> &input,
vector<uint8_t> &output)
{
string err;
Mat roi;
Mat orig = imdecode(input, IMREAD_COLOR);
if (orig.empty()) {
return -EINVAL;
}
auto r = find_roi(orig, roi, false, err);
if (r) return r;
imencode(".jpg", roi, output);
return 0;
}
// Serve ROI extraction over HTTP on port argv[0]; blocks in
// start_http_server (defined elsewhere).
// NOTE(review): atoi() silently yields 0 for a non-numeric port string.
static
int http_server_cmd(char **argv, int argc)
{
    char *port = argv[0];
    return start_http_server(atoi(port), http_server_handle_image);
}
// Compare a reference emblem image (args[0]) against the ROI extracted from
// a frame photo (args[1]) and print their similarity score.
// Returns 0 on success, non-zero on extraction/comparison failure.
static
int verify_cmd(char **args, int nargs)
{
    char *std_file = args[0];
    char *frame_file = args[1];
    Mat std = imread(std_file);
    // Fix: the original also did `Mat frame = imread(frame_file);` -- the
    // result was never used (roi_process_one reads the file itself), wasting
    // a full decode.
    Mat roi;
    int r;
    string err;
    r = roi_process_one(frame_file, false, err, true, &roi);
    if (r) {
        printf("failed to find roi: %s\n", err.c_str());
        return r;
    }
    double s = emblem_roi_similarity(FuzzyPixelCmp, std, roi, err);
    if (err.size()) {
        printf("err: %s\n", err.c_str());
        return 1;
    }
    // Fix: the success branch had "printf(...); return 0;" jammed on one
    // line followed by an unreachable duplicate `return 0;`.
    printf("similarity: %f\n", s);
    return 0;
}
// Print invocation help and the list of registered subcommands to stdout.
static
void usage(const char *name, vector<string> &cmds)
{
    printf("usage: %s <cmd> <arg0>\n", name);
    printf("or for 2 args: %s <cmd> <arg0> <arg1>\n", name);
    printf("possible commands:\n");
    for (size_t i = 0; i < cmds.size(); i++) {
        printf(" %s\n", cmds[i].c_str());
    }
}
#ifdef QRTOOL_MAIN
// CLI dispatcher: argv[1] selects the subcommand, remaining argv entries
// are forwarded to its handler. Unknown/missing command prints usage and
// exits 1.
int main(int argc, char *argv[])
{
    string cmd = "help";
    if (argc > 1) {
        cmd = argv[1];
    }
    vector<string> cmds;
    // Registers command name #c for the usage listing and, when it matches
    // argv[1] with at least `nargs` extra arguments, tail-calls c##_cmd.
    // NOTE(review): the expansion always passes (&argv[2], argc - 2), but
    // some handlers (roi_worker_cmd, grpc_server_cmd) are declared taking a
    // single char* -- verify this translation unit compiles with
    // QRTOOL_MAIN defined, or whether wrapper overloads exist elsewhere.
#define add_cmd(c, nargs) \
    do { \
        cmds.push_back(#c); \
        if (cmd == #c && argc >= 2 + nargs) return c##_cmd(&argv[2], argc - 2); \
    } while (0)
    add_cmd(detect, 1);
    add_cmd(angle, 1);
    add_cmd(dot, 1);
    add_cmd(clarity, 1);
    add_cmd(rectify, 1);
    add_cmd(topleft, 1);
    add_cmd(frame_roi, 1);
    add_cmd(roi, 1);
    add_cmd(roi_bench, 1);
#if USE_PULSAR
    add_cmd(roi_worker, 1);
    add_cmd(roi_worker_nop, 1);
#endif
#if ENABLE_GRPC
    add_cmd(grpc_server, 1);
#endif
    add_cmd(http_server, 1);
    add_cmd(verify, 2);
    // Fell through every add_cmd: unknown command or too few arguments.
    usage(argv[0], cmds);
    return 1;
}
#endif

59
alg/qrtool_wasm.cpp Normal file
View File

@ -0,0 +1,59 @@
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/core.hpp"
#include "opencv2/calib3d.hpp"
#include "libqr.h"
using namespace cv;
using namespace std;
// Serialize a detection result as a small fixed-format JSON object.
// Note: `err` and `qrcode` are interpolated unescaped, so they must not
// contain '"' characters; the response is truncated at 512 bytes.
static
std::string make_resp(bool ok, std::string err, int angle = -1, std::string qrcode = "", double elapsed = 0)
{
    char json[512];
    snprintf(json, sizeof(json),
             R"({ "ok": %s, "err": "%s", "qrcode": "%s", "angle": %d, "elapsed": %lf })",
             ok ? "true" : "false",
             err.c_str(),
             qrcode.c_str(),
             angle,
             elapsed);
    return std::string(json);
}
extern "C" {
// WASM-exported entry point: run dot/angle detection on a raw RGBA frame.
//   data:   width*height*4 bytes, RGBA (CV_8UC4 wrap below -- not copied).
//   dot_area: optional 32*32*4 RGBA buffer that receives a preview of the
//             detected dot area (filled with 255s around it, 55s if none).
//   camera_sensitivity: scaled by 1/10 into the Laplacian threshold.
// Returns a pointer to a STATIC buffer holding a JSON result string --
// valid until the next call, and not thread-safe (static char ret[512]).
const char *qrtool_angle(uint8_t *data, int width, int height, uint8_t *dot_area, float camera_sensitivity) {
    ProcessState ps;
    ps.laplacian_thres = camera_sensitivity / 10.0;
    auto start = std::chrono::system_clock::now();
    static char ret[512];
    // Debug tracing: dimensions plus the first 16 input bytes.
    printf("qrtool_angle, width: %d height %d\n", width, height);
    for (int i = 0; i < 16; i++) {
        printf("%d ", data[i]);
    }
    printf("\n");
    // Wraps the caller's buffer; `data` must stay alive during the call.
    Mat orig(Size(width, height), CV_8UC4, data);
    printf("mat: %d %d\n", orig.cols, orig.rows);
    string qrcode, err;
    float angle;
    auto ok = emblem_dot_angle(ps, orig, angle, qrcode, err);
    auto end = std::chrono::system_clock::now();
    std::chrono::duration<double> elapsed = end-start;
    // NOTE(review): make_resp takes `int angle`, so the float angle is
    // truncated in the JSON response -- confirm that is intended.
    auto x = make_resp(ok, err, angle, qrcode, elapsed.count());
    if (dot_area) {
        if (!ps.dot_area.empty()) {
            // Downscale the detected dot area to a 32x32 RGBA thumbnail.
            Mat da;
            ps.dot_area.convertTo(da, CV_8UC4);
            resize(da, da, Size(32 ,32));
            memset(dot_area, 255, 32 * 32 * 4);
            memcpy(dot_area, da.ptr(), 32 * 32 * 4);
        } else {
            // Sentinel fill (value 55) signals "no dot area found".
            memset(dot_area, 55, 32 * 32 * 4);
        }
    }
    snprintf(ret, 512, "%s", x.c_str());
    return ret;
}
}

115
alg/server.py Executable file
View File

@ -0,0 +1,115 @@
#!/usr/bin/env python3
import os
import json
import argparse
import subprocess
import tempfile
import base64
from flask import Flask, request, Response
app = Flask(__name__)
@app.route('/', methods=['GET'])
@app.route('/alg', methods=['GET'])
def index():
    # Health-check / landing endpoint: empty JSON object with CORS headers.
    return make_resp({})
def oss_save_image(data_url, file_prefix):
    """Decode a base64 PNG data-URL and upload it to the OSS sample bucket.

    data_url: string expected to start with 'data:image/png;base64,';
              anything else is silently ignored.
    file_prefix: object-key prefix; the current timestamp and '.png' are
              appended to form the key.
    """
    # Bug fix: oss2 and time are not imported at module level in this file,
    # so calling this function raised NameError; import them locally.
    import time
    import oss2
    # SECURITY(review): hard-coded credentials checked into source -- these
    # should come from configuration/environment and be rotated.
    ak = 'LTAI5tC2qXGxwHZUZP7DoD1A'
    sk = 'qPo9O6ZvEfqo4t8oflGEm0DoxLHJhm'
    auth = oss2.Auth(ak, sk)
    endpoint = 'oss-cn-guangzhou.aliyuncs.com'
    bucket_name = 'emblem-roi-samples'
    bucket = oss2.Bucket(auth, endpoint, bucket_name)
    pref = 'data:image/png;base64,'
    if data_url.startswith(pref):
        decoded = base64.b64decode(data_url[len(pref):])
        fname = file_prefix + str(time.time()) + ".png"
        bucket.put_object(fname, decoded)
def make_resp(content):
    """Serialize *content* as a JSON Flask response with permissive CORS
    headers (this service is called cross-origin from the web frontend)."""
    resp = Response(json.dumps(content))
    headers = {
        'Access-Control-Allow-Origin': "*",
        'Access-Control-Allow-Methods': '*',
        'Access-Control-Allow-Headers': '*',
        'Content-Type': 'application/json',
    }
    for name, value in headers.items():
        resp.headers[name] = value
    return resp
@app.route('/alg/angle', methods=['POST', 'OPTIONS'])
def angle():
    """Angle-detection endpoint; any handler failure is mapped to a JSON
    error payload instead of an HTTP 500."""
    try:
        return handle_angle_request()
    except Exception as exc:
        error_body = {"ok": False, "err": str(exc)}
        return make_resp(error_body)
def handle_angle_request():
    """Decode the posted image (raw JPEG bytes or a base64 data-URL), run
    `qrtool angle` on it, and return {ok, qrcode, angle} or {ok, err}."""
    # CORS preflight: reply immediately with the permissive headers.
    if request.method == 'OPTIONS':
        return make_resp({})
    body = request.stream.read()
    pref = 'data:image/jpeg;base64,'
    err = "Cannot detect angle"
    # NOTE(review): `angle` is assigned here but never used below.
    angle = -1
    # Accept either a base64 data-URL or raw image bytes; any decode
    # failure deliberately falls back to treating the body as raw bytes.
    try:
        encoded = body.decode()
        if encoded.startswith(pref):
            decoded = base64.b64decode(encoded[len(pref):])
        else:
            decoded = body
    except:
        decoded = body
    # delete=False keeps the upload on disk after the request.
    # NOTE(review): files accumulate in uploads/ -- presumably kept for
    # debugging/sample collection; confirm something cleans them up. The
    # "uploads" directory must already exist.
    with tempfile.NamedTemporaryFile(dir="uploads", suffix=".jpg", delete=False) as tf:
        tf.write(decoded)
        tf.flush()
        cmd = ['./qrtool', 'angle', tf.name]
        print(" ".join(cmd))
        r = subprocess.run(cmd, capture_output=True)
        if r.returncode == 0:
            # qrtool prints "<label> <qrcode>" then "<label> <angle>".
            lines = r.stdout.decode().splitlines()
            return make_resp({
                "ok": True,
                "qrcode": lines[0].split()[1],
                "angle": int(lines[1].split()[1])
            })
        else:
            err = r.stderr.decode()
    return make_resp({
        "ok": False,
        "err": err,
    })
def handle_rectify_request():
    """Write the raw request body to a temp file, run `qrtool rectify` on
    it, and return the produced image as an image/jpeg response.

    Any qrtool failure raises CalledProcessError, handled by the route.
    """
    body = request.stream.read()
    with tempfile.NamedTemporaryFile() as tf:
        tf.write(body)
        tf.flush()
        # qrtool writes its output next to the input file.
        outf = tf.name + ".qr.jpg"
        cmd = ['./qrtool', 'rectify', tf.name]
        subprocess.check_call(cmd)
        with open(outf, 'rb') as of:
            resp = Response(of.read(), status=200, mimetype='image/jpeg')
        # Remove qrtool's output; the input temp file is removed by the
        # context manager.
        os.unlink(outf)
        return resp
@app.route('/alg/rectify', methods=['POST', 'OPTIONS'])
def rectify():
    """Rectify endpoint; handler failures become JSON error payloads."""
    try:
        return handle_rectify_request()
    except Exception as exc:
        error_body = {"ok": False, "err": str(exc)}
        return make_resp(error_body)
def parse_args(argv=None):
    """Parse command-line options for the alg HTTP server.

    argv: optional argument list (defaults to sys.argv[1:] as before);
          parameter added for testability, backward compatible.
    Returns an argparse.Namespace with .host and .port.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--host", "-l", default="0.0.0.0")
    parser.add_argument("--port", "-p", type=int, default=3028)
    return parser.parse_args(argv)
# Entry point: parse CLI options and run the Flask dev server (blocking).
if __name__ == '__main__':
    args = parse_args()
    app.run(host=args.host, port=args.port)

11
alg/string_format.h Normal file
View File

@ -0,0 +1,11 @@
// printf-style formatting into a std::string.
// Measures the required length with a null-buffer snprintf, formats into a
// heap buffer, and returns the result without the trailing '\0'.
// Throws std::runtime_error when snprintf reports an encoding error.
// Note: arguments are forwarded to snprintf verbatim, so std::string args
// must be passed as .c_str().
template<typename ... Args>
std::string string_format( const std::string& format, Args ... args )
{
    const int needed = std::snprintf(nullptr, 0, format.c_str(), args ...) + 1; // +1 for '\0'
    if (needed <= 0) {
        throw std::runtime_error("Error during formatting.");
    }
    const auto buf_size = static_cast<size_t>(needed);
    std::unique_ptr<char[]> buffer(new char[buf_size]);
    std::snprintf(buffer.get(), buf_size, format.c_str(), args ...);
    return std::string(buffer.get(), buffer.get() + buf_size - 1); // drop the '\0'
}

Binary file not shown.

File diff suppressed because it is too large Load Diff

Binary file not shown.

View File

@ -0,0 +1,403 @@
layer {
name: "data"
type: "Input"
top: "data"
input_param {
shape {
dim: 1
dim: 1
dim: 224
dim: 224
}
}
}
layer {
name: "conv0"
type: "Convolution"
bottom: "data"
top: "conv0"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 32
bias_term: true
pad: 1
kernel_size: 3
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "conv0/lrelu"
type: "ReLU"
bottom: "conv0"
top: "conv0"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db1/reduce"
type: "Convolution"
bottom: "conv0"
top: "db1/reduce"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 8
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db1/reduce/lrelu"
type: "ReLU"
bottom: "db1/reduce"
top: "db1/reduce"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db1/3x3"
type: "Convolution"
bottom: "db1/reduce"
top: "db1/3x3"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 8
bias_term: true
pad: 1
kernel_size: 3
group: 8
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db1/3x3/lrelu"
type: "ReLU"
bottom: "db1/3x3"
top: "db1/3x3"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db1/1x1"
type: "Convolution"
bottom: "db1/3x3"
top: "db1/1x1"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 32
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db1/1x1/lrelu"
type: "ReLU"
bottom: "db1/1x1"
top: "db1/1x1"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db1/concat"
type: "Concat"
bottom: "conv0"
bottom: "db1/1x1"
top: "db1/concat"
concat_param {
axis: 1
}
}
layer {
name: "db2/reduce"
type: "Convolution"
bottom: "db1/concat"
top: "db2/reduce"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 8
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db2/reduce/lrelu"
type: "ReLU"
bottom: "db2/reduce"
top: "db2/reduce"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db2/3x3"
type: "Convolution"
bottom: "db2/reduce"
top: "db2/3x3"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 8
bias_term: true
pad: 1
kernel_size: 3
group: 8
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db2/3x3/lrelu"
type: "ReLU"
bottom: "db2/3x3"
top: "db2/3x3"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db2/1x1"
type: "Convolution"
bottom: "db2/3x3"
top: "db2/1x1"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 32
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "db2/1x1/lrelu"
type: "ReLU"
bottom: "db2/1x1"
top: "db2/1x1"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "db2/concat"
type: "Concat"
bottom: "db1/concat"
bottom: "db2/1x1"
top: "db2/concat"
concat_param {
axis: 1
}
}
layer {
name: "upsample/reduce"
type: "Convolution"
bottom: "db2/concat"
top: "upsample/reduce"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 32
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "upsample/reduce/lrelu"
type: "ReLU"
bottom: "upsample/reduce"
top: "upsample/reduce"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "upsample/deconv"
type: "Deconvolution"
bottom: "upsample/reduce"
top: "upsample/deconv"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 32
bias_term: true
pad: 1
kernel_size: 3
group: 32
stride: 2
weight_filler {
type: "msra"
}
}
}
layer {
name: "upsample/lrelu"
type: "ReLU"
bottom: "upsample/deconv"
top: "upsample/deconv"
relu_param {
negative_slope: 0.05000000074505806
}
}
layer {
name: "upsample/rec"
type: "Convolution"
bottom: "upsample/deconv"
top: "upsample/rec"
param {
lr_mult: 1.0
decay_mult: 1.0
}
param {
lr_mult: 1.0
decay_mult: 0.0
}
convolution_param {
num_output: 1
bias_term: true
pad: 0
kernel_size: 1
group: 1
stride: 1
weight_filler {
type: "msra"
}
}
}
layer {
name: "nearest"
type: "Deconvolution"
bottom: "data"
top: "nearest"
param {
lr_mult: 0.0
decay_mult: 0.0
}
convolution_param {
num_output: 1
bias_term: false
pad: 0
kernel_size: 2
group: 1
stride: 2
weight_filler {
type: "constant"
value: 1.0
}
}
}
layer {
name: "Crop1"
type: "Crop"
bottom: "nearest"
bottom: "upsample/rec"
top: "Crop1"
}
layer {
name: "fc"
type: "Eltwise"
bottom: "Crop1"
bottom: "upsample/rec"
top: "fc"
eltwise_param {
operation: SUM
}
}

67
alg/worker.py Executable file
View File

@ -0,0 +1,67 @@
#!/usr/bin/env python3
import os
import uuid
import json
import base64
import tempfile
import subprocess
import pulsar
client = pulsar.Client('pulsar://localhost:6650')
worker_id = str(uuid.uuid4())
producer = client.create_producer('estor')
result_producer = None
consumer = client.subscribe('roi', f'roi-worker-{worker_id}')
def import_file(fname, content):
    """Publish a file to the 'estor' topic as b"<fname>\\0<content>"."""
    print('Import file', fname, len(content))
    payload = fname.encode() + b'\0' + content
    producer.send(payload)
def make_roi_path(orig):
    """Derive the ROI output path from an import path by replacing the
    4th path component with 'roi'; empty/blank components are dropped and
    the result is always absolute.

    e.g. '/emblem/batches/b1/import/x.jpg' -> '/emblem/batches/b1/roi/x.jpg'
    """
    parts = []
    for piece in orig.split('/'):
        if piece.strip():
            parts.append(piece)
    rewritten = parts[:3]
    rewritten.append('roi')
    rewritten.extend(parts[4:])
    return '/' + '/'.join(rewritten)
def handle_qr(fname, content, result_topic):
    """Run ROI extraction on one image and publish the result.

    fname: logical object path of the image (used to derive the ROI path).
    content: raw image bytes.
    result_topic: pulsar topic for the JSON result message.

    Raises CalledProcessError when qrtool fails (caller nacks the message).
    """
    with tempfile.NamedTemporaryFile(suffix=".jpg") as tf:
        tf.write(content)
        tf.flush()
        cmd = ['./qrtool', 'roi', tf.name]
        subprocess.check_call(cmd)
        # qrtool writes its output next to the input file.
        # (Fix: the original also computed an unused `newfile = fname + ...`.)
        roi_file = tf.name + ".roi.jpg"
        with open(roi_file, 'rb') as f:
            roi_data = f.read()
        # Fix: remove qrtool's output -- it was previously leaked on every
        # call (the temp *input* is cleaned by the context manager).
        os.unlink(roi_file)
    # Cache the result producer, recreating it only when the topic changes.
    global result_producer
    if not result_producer or result_producer.topic() != result_topic:
        result_producer = client.create_producer(result_topic)
    roi_path = make_roi_path(fname)
    resp = {
        'path': fname,
        'succeeded': True,
        'output_files': [{
            'path': roi_path,
            'data_b64': base64.b64encode(roi_data).decode(),
        }],
        'size': len(content),
    }
    result_producer.send(json.dumps(resp).encode())
def roi_worker():
    """Consume-forever loop for the 'roi' subscription.

    Each message payload is JSON: {path, data_b64, result_topic}. Successful
    messages are acked; any failure is printed and nacked so the broker
    redelivers the message later.
    """
    while True:
        msg = consumer.receive()
        try:
            body = msg.data()
            print("Received message id='{}'".format(msg.message_id()))
            payload = json.loads(body)
            fname = payload['path']
            content = base64.b64decode(payload['data_b64'])
            handle_qr(fname, content, payload['result_topic'])
            consumer.acknowledge(msg)
        except Exception as e:
            # Best-effort logging; the nack triggers redelivery.
            print(e)
            consumer.negative_acknowledge(msg)
# import_file("/emblem/batches/test-batch/import/test.jpg", open('/etc/fstab', 'rb').read())
roi_worker()

2
api/.dockerignore Normal file
View File

@ -0,0 +1,2 @@
/web/node_modules
/qrreader/target

69
api/Makefile Normal file
View File

@ -0,0 +1,69 @@
IMAGE_TAG ?= $(shell git describe --always)
.PHONY: FORCE
DOCKER := docker
# Default goal: build the web assets, then the docker image, then push --
# as three sub-makes so each step is independently re-runnable. $(MAKE) is
# used so -j/-n propagate.
# NOTE(review): all/docker/push/test/... are command targets but only FORCE
# is declared .PHONY; a file named e.g. "docker" would shadow its target.
all:
	$(MAKE) web
	$(MAKE) docker
	$(MAKE) push
# Build the application image, tagged from `git describe`.
docker:
	$(DOCKER) build -t emblem:$(IMAGE_TAG) .
# Tag and push the image once per tag: the commit tag plus, under GitLab CI,
# the branch name ($(CI_COMMIT_BRANCH) expands empty locally, so the shell
# loop just skips it).
# Fixed: the loop previously ignored its $$tag variable and tagged/pushed
# registry.../emblem:$(IMAGE_TAG) on every iteration, so the branch tag was
# never published.
push:
	for tag in $(IMAGE_TAG) $(CI_COMMIT_BRANCH); do \
		$(DOCKER) tag emblem:$(IMAGE_TAG) registry.gitlab.com/euphon/emblem:$$tag &&\
		$(DOCKER) push registry.gitlab.com/euphon/emblem:$$tag; \
	done
# Build (with host networking) and run the image locally, mapping host
# port 12345 to the container's port 80.
docker-run:
	$(DOCKER) build --network=host -t emblem:$(IMAGE_TAG) .
	$(DOCKER) run -ti --rm -p 12345:80 emblem:$(IMAGE_TAG)
# Build the web frontend; FORCE makes this run unconditionally.
# Fixed: `cd web; npm run build` would run npm in the wrong directory if the
# cd failed -- chain with && so the recipe aborts instead.
web: FORCE
	cd web && npm run build
# Run the Django test suite (migrate first so the schema is current).
# Fixed: `cd api; ...` ignored a failed cd and ran the command from the
# wrong directory -- && makes each line abort on failure.
test:
	cd api && ./manage.py migrate
	cd api && ./manage.py test tests
# Run only the stress-test subset.
stress:
	cd api && ./manage.py test tests.stress
# Deploy to both environments (dev cluster, then prod).
deploy: deploy-dev deploy-prod
# Dev: "derby" cluster, in-cluster postgres service.
deploy-dev: FORCE
	./scripts/deploy --kubeconfig deploy/kubeconfig.derby \
		--db-host postgres-postgresql.db \
		--emblem-env dev \
		-n emblem \
		-i registry.gitlab.com/euphon/emblem:$(IMAGE_TAG)
# "g" cluster: prod settings against the node-local DB address.
# NOTE(review): not part of the aggregate `deploy` target -- confirm that
# is intentional.
deploy-g: FORCE
	./scripts/deploy --kubeconfig deploy/kubeconfig.g \
		--db-host 10.42.0.1 \
		--emblem-env prod \
		-n emblem \
		-i registry.gitlab.com/euphon/emblem:$(IMAGE_TAG)
# Prod: "themblem" cluster, fixed internal DB address.
deploy-prod: FORCE
	./scripts/deploy --kubeconfig deploy/kubeconfig.themblem \
		--db-host 192.168.33.175 \
		--emblem-env prod \
		-n emblem \
		-i registry.gitlab.com/euphon/emblem:$(IMAGE_TAG)
# Start the local development session (tmux layout script).
run:
	./scripts/run-tmux.sh
# Boot the dev VM under QEMU via the `q` wrapper, with serial console,
# user-mode networking with SSH/tensorboard/web port forwards, 9 NVMe data
# disks, and an external SCSI disk.
# NOTE(review): $(DATA_IMGS) and $(EXT_IMG) are referenced but not defined
# in this Makefile -- presumably inherited from the environment; verify.
vm: FORCE vm/sys.img $(DATA_IMGS)
	q q +vblk:vm/sys.img +sd:vm/ext.img -f --no-net -- \
		-bios /usr/share/ovmf/OVMF.fd \
		-serial stdio \
		-device virtio-blk,drive=sys \
		-netdev user,id=n0,hostfwd=::10022-:22,hostfwd=::6006-:6006,hostfwd=::13000-:3000,hostfwd=::18000-:8000 \
		-device virtio-net-pci,netdev=n0 \
		$(shell for i in 0 1 2 3 4 5 6 7 8; do echo -drive file=vm/data-$$i.img,if=none,id=d$$i -device nvme,serial=NVME_$$i,drive=d$$i; done) \
		-device virtio-scsi \
		-drive file=$(EXT_IMG),if=none,id=ext0 -device scsi-hd,drive=ext0

3
api/README.md Normal file
View File

@ -0,0 +1,3 @@
# Emblem
The Emblem Project

1
api/api/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
db.sqlite3

16
api/api/emblemapi/asgi.py Normal file
View File

@ -0,0 +1,16 @@
"""
ASGI config for emblemapi project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'emblemapi.settings')
application = get_asgi_application()

View File

@ -0,0 +1,181 @@
"""
Django settings for emblemapi project.
Generated by 'django-admin startproject' using Django 3.2.10.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from keys import *
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-po^713agbnee6w8&ovj-9@)cyv&-&1q&v8%88(e3o)okn%de_3'
# SECURITY WARNING: don't run with debug turned on in production!
ENV = os.environ.get("EMBLEM_ENV", "debug")
DEBUG = ENV in ["debug", "dev"]
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'tastypie',
'products',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ALLOW_ALL_ORIGINS = True
ROOT_URLCONF = 'emblemapi.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'emblemapi.wsgi.application'
APPEND_SLASH = False
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
if os.environ.get("EMBLEM_DB_TYPE") == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'emblem',
'USER': 'emblem',
'PASSWORD': 'emblempass',
'HOST': os.environ["EMBLEM_DB_HOST"],
'PORT': '5432',
},
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'emblem_cache_table',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/api/static/'
STATIC_ROOT = './static'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
DATA_UPLOAD_MAX_MEMORY_SIZE = 20 << 20
if ENV == "prod":
OSS = aliyun_prod_key
FEATURES_BUCKET = "emblem-features-prod"
ARCHIVE_BUCKET = "emblem-archive-prod"
else:
OSS = aliyun_dev_key
FEATURES_BUCKET = "emblem-features-dev-1"
ARCHIVE_BUCKET = "emblem-oss-archive-dev-1"
IPINFO_TOKEN = '537dea9ec5c99a'
TOKEN_EXPIRE_MINUTES = 180
ADMINS = [('Fam Zheng', 'fam@euphon.net')]
EMAIL_HOST = 'smtpdm.aliyun.com'
EMAIL_PORT = 465
EMAIL_USE_SSL = True
EMAIL_HOST_USER = 'noreply@emblem-notify.euphon.net'
EMAIL_HOST_PASSWORD = 'N72yBNi4cJw'
SERVER_EMAIL = EMAIL_HOST_USER
CSRF_TRUSTED_ORIGINS = ['https://*.themblem.com', 'https://themblem.com']

23
api/api/emblemapi/urls.py Normal file
View File

@ -0,0 +1,23 @@
"""emblemapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('api/admin/', admin.site.urls),
path('api/', include('products.urls')),
path('v/', include('products.v_urls')),
]

16
api/api/emblemapi/wsgi.py Normal file
View File

@ -0,0 +1,16 @@
"""
WSGI config for emblemapi project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'emblemapi.settings')
application = get_wsgi_application()

5
api/api/euphon.py Normal file
View File

@ -0,0 +1,5 @@
import requests
def send_alert(msg):
    """POST *msg* as the request body to the euphon alerting webhook.

    msg: payload accepted by requests' `data=` (str/bytes/dict).
    Raises requests exceptions on HTTP/network failure or timeout.
    """
    url = 'https://euphon-ps-alert-lgtmbklwhe.cn-beijing.fcapp.run/Ye2ienoo'
    # Fix: requests has NO default timeout, so the original call could hang
    # the caller indefinitely on a stuck connection.
    requests.post(url, data=msg, timeout=10)

14
api/api/keys.py Normal file
View File

@ -0,0 +1,14 @@
aliyun_prod_key = {
'access_key': 'LTAI5tC2qXGxwHZUZP7DoD1A',
'secret': 'qPo9O6ZvEfqo4t8oflGEm0DoxLHJhm',
'endpoint': 'https://oss-cn-shenzhen.aliyuncs.com',
'bucket': 'emblem-prod',
}
aliyun_dev_key = {
'access_key': 'LTAI5tC2qXGxwHZUZP7DoD1A',
'secret': 'qPo9O6ZvEfqo4t8oflGEm0DoxLHJhm',
'endpoint': 'https://oss-eu-west-1.aliyuncs.com',
'bucket': 'emblem-dev-1',
}

22
api/api/manage.py Executable file
View File

@ -0,0 +1,22 @@
#!/usr/bin/env python3
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Default the settings module; an existing DJANGO_SETTINGS_MODULE wins.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'emblemapi.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)

67
api/api/notify.py Executable file
View File

@ -0,0 +1,67 @@
#!/usr/bin/env python3
import json
import argparse
import smtplib
from keys import *
from email.message import EmailMessage
from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.request import CommonRequest
FROM_ADDR = 'noreply@emblem-notify.euphon.net'
def send_email(to, subject, body):
    """Send a plain-text email via Aliyun DirectMail SMTP.

    SECURITY(review): the SMTP password is hard-coded here, and smtplib.SMTP
    connects on the default port without TLS, so the login travels in the
    clear -- settings.py configures port 465 + SSL for this same host;
    verify this should be SMTP_SSL and move the secret to config.
    """
    msg = EmailMessage()
    msg.set_content(body)
    msg['Subject'] = subject
    msg['From'] = FROM_ADDR
    msg['To'] = to
    s = smtplib.SMTP('smtpdm.aliyun.com')
    s.login(FROM_ADDR, 'N72yBNi4cJw')
    s.send_message(msg)
    s.quit()
def send_sms_code(mobile, code):
    """Send a verification-code SMS through Aliyun's SendSms API.

    mobile: recipient phone number.
    code: the verification code injected into template SMS_280201599.
    Raises Exception with Aliyun's message when the API response Code != OK.
    """
    ak = aliyun_prod_key['access_key']
    sk = aliyun_prod_key['secret']
    client = AcsClient(ak, sk, 'cn-shenzhen')
    request = CommonRequest()
    request.set_accept_format('json')
    request.set_domain('dysmsapi.aliyuncs.com')
    request.set_method('POST')
    request.set_protocol_type('https') # https | http
    request.set_version('2017-05-25')
    request.set_action_name('SendSms')
    request.add_query_param('RegionId', "cn-shenzhen")
    request.add_query_param('PhoneNumbers', mobile)
    request.add_query_param('SignName', "themblem")
    request.add_query_param('TemplateCode', 'SMS_280201599')
    request.add_query_param('TemplateParam', "{\"code\":\"%s\"}" % code)
    # Debug tracing of the outbound request/response.
    print(request)
    response = client.do_action(request).decode()
    print(response)
    r = json.loads(response)
    if r.get("Code") != "OK":
        raise Exception(r["Message"])
def parse_args(argv=None):
    """Parse notification CLI options.

    argv: optional argument list (defaults to sys.argv[1:] as before);
          parameter added for testability, backward compatible.
    Email requires --to/--subject; SMS requires --mobile/--verify-code.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--to")
    parser.add_argument("--subject")
    parser.add_argument("--body", default='no content')
    parser.add_argument("--mobile")
    parser.add_argument("--verify-code", '-c')
    return parser.parse_args(argv)
def main():
    """Dispatch email and/or SMS notifications from the parsed options."""
    opts = parse_args()
    if opts.to and opts.subject:
        send_email(opts.to, opts.subject, opts.body)
    if opts.mobile and opts.verify_code:
        send_sms_code(opts.mobile, opts.verify_code)
if __name__ == "__main__":
main()

View File

21
api/api/products/admin.py Normal file
View File

@ -0,0 +1,21 @@
from django.contrib import admin
from .models import *
# Register your models here.
# Each model gets the default ModelAdmin unless noted otherwise.
admin.site.register(CodeBatch)
admin.site.register(SerialCode)
admin.site.register(Product)
admin.site.register(Article)
admin.site.register(AdminInfo)
admin.site.register(SmsVerifiedAction)
admin.site.register(ScanData)
admin.site.register(EstorArchiveRecord)
admin.site.register(Job)
admin.site.register(ConsumerInfo)
# AuthToken: enable admin search by token value.
class AuthTokenAdmin(admin.ModelAdmin):
    search_fields = ['token']
admin.site.register(AuthToken, AuthTokenAdmin)
admin.site.register(GlobalConfig)

View File

@ -0,0 +1,39 @@
import json
import oss2
from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.request import CommonRequest
from django.conf import settings
def oss_bucket(bucketname):
    """Return an oss2.Bucket handle for *bucketname*, falling back to the
    bucket configured in settings.OSS when None/empty."""
    creds = settings.OSS
    auth = oss2.Auth(creds['access_key'], creds['secret'])
    chosen = bucketname or creds['bucket']
    return oss2.Bucket(auth, creds['endpoint'], chosen)
def oss_put(name, f, bucket=None):
    # Upload bytes/file-like *f* under key *name* (default bucket if None).
    oss_bucket(bucket).put_object(name, f)
def oss_get(name, bucket=None):
    # Fetch the full object body, or None when the key does not exist.
    try:
        return oss_bucket(bucket).get_object(name).read()
    except oss2.exceptions.NoSuchKey:
        return None
def oss_sign_url(name, method='GET', bucket=None):
    # Presigned URL for *name*, valid for 24 hours.
    # NOTE(review): name.encode() passes bytes where oss2 normally takes a
    # str key -- verify this is deliberate (non-ASCII key handling?).
    return oss_bucket(bucket).sign_url(method, name.encode(), 24 * 60 * 60)
def oss_has(name, bucket=None):
    """Return True if object *name* exists in the bucket.

    Improvement: uses oss2's object_exists() (a HEAD-based probe) instead of
    opening a full GET request just to test existence, as the original did.
    Non-existence returns False; other OSS errors still propagate.
    """
    return oss_bucket(bucket).object_exists(name)
def oss_stat(bucket=None):
    # Bucket-level statistics: object count and total stored bytes.
    bucket = oss_bucket(bucket)
    stat = bucket.get_bucket_stat()
    return {
        'objects': stat.object_count,
        'size': stat.storage_size_in_bytes,
    }

6
api/api/products/apps.py Normal file
View File

@ -0,0 +1,6 @@
from django.apps import AppConfig
class ProductsConfig(AppConfig):
    """Django app configuration for the 'products' application."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'products'

View File

@ -0,0 +1,45 @@
import json
from django.template import Template, Context
css_template = """
body {
background-color: {{ backgroun_color }};
}
.mce-content-body {
background-color: {{ backgroun_color }};
}
.article {
font-size: 1.5rem;
text-overflow: wrap;
color: #222;
background-color: {{ backgroun_color }};
}
.article a {
text-decoration: none;
color: #222;
}
.article a:hover {
text-decoration: none;
color: #222;
}
.article img {
max-width: 100%;
object-fit: contain;
}
"""
def gen_article_css(article):
    """Render the per-article stylesheet from article.options JSON.

    article.options: JSON string; key 'page_bg_color' sets the background
    color (default '#ffffff'). Missing/invalid JSON falls back to defaults.
    """
    try:
        options = json.loads(article.options)
    except Exception:
        # Fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; the intent is only to tolerate bad JSON.
        options = {}
    t = Template(css_template)
    # 'backgroun_color' (sic) matches the variable spelling used throughout
    # css_template, so the typo is consistent and harmless.
    c = Context({
        'backgroun_color': options.get('page_bg_color', '#ffffff')
    })
    return t.render(c)

View File

@ -0,0 +1,40 @@
import os
from django.conf import settings
import logging
import subprocess
from django.core.cache import cache
def get_client_ip(request):
    """Best-effort client IP: the first hop of X-Forwarded-For when the
    request came through a proxy, otherwise REMOTE_ADDR (may be None)."""
    forwarded = request.META.get('HTTP_X_FORWARDED_FOR')
    if forwarded:
        return forwarded.split(',')[0]
    return request.META.get('REMOTE_ADDR')
def request_to_region(request):
    # Resolve the request's client IP to a region string via ip_to_region;
    # returns None (implicit) when no client IP can be determined.
    ip = get_client_ip(request)
    if not ip:
        logging.warning("Cannot get client IP from request: %s" % request)
        return
    return ip_to_region(ip)
def ip_to_region(ip):
    """Map an IP address to a human-readable region string.

    Shells out to scripts/ip2region.py (pipe-separated output) and caches
    the result per IP in the Django cache (default timeout).
    """
    ck = 'ip2region.' + ip
    region = cache.get(ck)
    if not region:
        cmd = [os.path.join(settings.BASE_DIR, "../scripts/ip2region.py"), ip]
        try:
            out = subprocess.check_output(cmd).decode()
            # De-duplicate the pipe-separated fields, dropping empty, '0'
            # and the country placeholder, preserving order.
            seen = set()
            locs = []
            for u in out.split('|'):
                us = u.strip()
                if us not in ['', '0', '中国'] and us not in seen:
                    seen.add(us)
                    locs.append(us)
            region = '-'.join(locs)
        except Exception as e:
            # Best-effort: lookup failures degrade to "N/A".
            # NOTE(review): "N/A" is cached too, so a transient failure
            # sticks until the cache entry expires -- confirm acceptable.
            region = "N/A"
        cache.set(ck, region)
    return region

View File

@ -0,0 +1,52 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
class Command(BaseCommand):
    """Bind every SerialCode of a batch to the batch's tenant (allocating
    sequential tenant seq_nums), or clear the binding when the batch has no
    tenant, reporting progress through a Job row."""
    help = 'Update batch binding with tenant'
    def add_arguments(self, parser):
        parser.add_argument('--batch', '-b', type=int)
        parser.add_argument('--job', '-j', type=int)
    def handle(self, *args, **options):
        # Reuse an existing Job for progress reporting, or create one.
        # NOTE(review): '%d' % options.get('batch') raises TypeError when
        # --batch is omitted, and total==0 below divides by zero -- confirm
        # --batch is effectively required and batches are never empty.
        if options.get('job'):
            job = Job.objects.get(pk=options['job'])
        else:
            job = Job.objects.create(name='bind-batch.%d' % options.get('batch'))
        try:
            job.update('running', 0.0)
            b = CodeBatch.objects.get(pk=options['batch'])
            t = b.tenant
            q = SerialCode.objects.filter(batch=b)
            total = q.count()
            done = 0
            batchsize = 1000
            for objs in self.batch_iter(q, batchsize):
                if t:
                    # Reserve a contiguous block of sequence numbers and
                    # hand them out one per code.
                    # NOTE(review): a full `batchsize` block is allocated
                    # even for the final, smaller chunk -- gaps may remain.
                    seq_num = t.alloc_seq_nums(batchsize)
                    for x in objs:
                        x.tenant = t
                        x.seq_num = seq_num
                        seq_num += 1
                else:
                    # No tenant on the batch: clear any existing binding.
                    for x in objs:
                        x.tenant = None
                        x.seq_num = None
                SerialCode.objects.bulk_update(objs, ['tenant', 'seq_num'])
                done += len(objs)
                perc = done * 100.0 / total
                job.update('running', perc)
                print(f"bound {done} codes, total {total}")
            job.update('done', 100.0)
        except Exception as e:
            # Record the failure on the Job; note the error is NOT re-raised.
            job.update('error', message=str(e))
    def batch_iter(self, q, batchsize=1000):
        # Yield the queryset in lists of at most `batchsize` objects, using
        # iterator() to avoid loading the whole result set at once.
        this_batch = []
        for x in q.iterator():
            this_batch.append(x)
            if len(this_batch) >= batchsize:
                yield this_batch
                this_batch = []
        if this_batch:
            yield this_batch

View File

@ -0,0 +1,82 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
from django.db import transaction
class Command(BaseCommand):
    """Bulk (de)activate a tenant's serial codes, or (un)bind them to a
    product, selected by --seq-range, --code-file or --all, with Job-based
    progress reporting."""
    help = 'Manage serial code in batch'
    def add_arguments(self, parser):
        parser.add_argument('--tenant-id', '-t', required=True, type=int)
        parser.add_argument('--job', '-j', type=int)
        parser.add_argument('--all', '-A', action="store_true", help="apply to all code")
        parser.add_argument('--seq-range', "-r", help="code by seq range")
        parser.add_argument('--code-file', "-f", help="code list from file")
        parser.add_argument('--activate', "-a", action="store_true", help="activate code")
        parser.add_argument('--deactivate', "-d", action="store_true", help="deactivate code")
        parser.add_argument('--bind-product', "-b", type=int, help="bind to product by id")
        parser.add_argument('--unbind-product', "-u", action="store_true", help="unbind product")
    def handle(self, *args, **options):
        """Apply the requested operation in chunks of 10000 codes."""
        if options.get('job'):
            job = Job.objects.get(pk=options['job'])
        else:
            # Bug fix: this command has no --batch option, so the original
            # name 'bind-batch.%d' % options.get('batch') raised
            # TypeError ('%d' % None) whenever --job was omitted.
            # Name the job after the (required) tenant instead.
            job = Job.objects.create(name='code-op.%d' % options['tenant_id'])
        try:
            job.update('running', 0.0)
            tenant = Tenant.objects.get(pk=options['tenant_id'])
            query = self.build_query(options)
            total = query.count()
            done = 0
            prod = None
            if options.get("bind_product"):
                # Product must belong to the same tenant.
                prod = Product.objects.get(tenant=tenant, pk=options['bind_product'])
            for batch_ids in self.iterate_batches(query, 10000):
                uq = SerialCode.objects.filter(pk__in=batch_ids)
                if options.get("activate"):
                    uq.update(is_active=True)
                elif options.get("deactivate"):
                    uq.update(is_active=False)
                elif options.get("unbind_product"):
                    uq.update(product=None)
                elif options.get("bind_product"):
                    uq.update(product=prod)
                # NOTE(review): transaction.commit() raises under Django's
                # default autocommit mode -- verify an atomic block is in
                # effect when this management command runs.
                transaction.commit()
                done += len(batch_ids)
                print("code batch op", done, total)
                perc = done * 100.0 / total
                job.update('running', perc)
            job.update('done', 100.0)
        except Exception as e:
            job.update('error', message=str(e))
            raise
    def iterate_batches(self, query, batchsize):
        # Yield lists of primary keys, `batchsize` at a time, via slicing.
        total = query.count()
        cur = 0
        while cur < total:
            yield [x.pk for x in query[cur : cur + batchsize]]
            cur += batchsize
    def build_query(self, options):
        """Build the SerialCode queryset for the tenant from --seq-range,
        --code-file or --all (exactly one selector required)."""
        tenant = Tenant.objects.get(pk=options['tenant_id'])
        query = SerialCode.objects.filter(tenant=tenant).order_by('pk')
        if options.get("seq_range"):
            begin, end = options['seq_range'].split(',', maxsplit=1)
            query = query.filter(seq_num__gte=int(begin),
                                 seq_num__lte=int(end))
        elif options.get("code_file"):
            codes = self.read_code_file(options['code_file'])
            query = query.filter(code__in=codes)
        elif options.get("all"):
            pass
        else:
            raise Exception("Code not specified")
        return query
    def read_code_file(self, cf):
        # One code per line; lines starting with '#' are comments.
        with open(cf, 'r') as f:
            return [x for x in f.read().splitlines() if not x.strip().startswith('#')]

View File

@ -0,0 +1,42 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
class Command(BaseCommand):
    """Export all codes of a CodeBatch to a text file, one formatted line
    per code, reporting progress through a Job."""
    help = 'Export batch to a file'

    def add_arguments(self, parser):
        parser.add_argument('--batch', '-b', type=int, required=True, help="batch id")
        parser.add_argument('--output', '-o', required=True, help="output file")
        parser.add_argument('--job', '-j', type=int, help="The job object to report progress")
        parser.add_argument('--pattern', '-P', default='{code}', help="The pattern of each line, where {code} is replaced with the serial code")

    def handle(self, *args, **options):
        batch_id = options['batch']
        if options.get('job'):
            # Reuse an existing Job so callers can track progress.
            job = Job.objects.get(pk=options['job'])
        else:
            job = Job.objects.create(name='export-batch.%d' % batch_id)
        try:
            job.update('running', 0.0)
            b = CodeBatch.objects.get(pk=batch_id)
            ac = b.codes.all().order_by('seq_num')
            total = ac.count()
            done = 0
            # BUGFIX: the output file was opened but never closed (only
            # flushed); a context manager guarantees close on all paths.
            with open(options['output'], 'w', encoding='utf-8') as fn:
                for c in ac.iterator():
                    # '{code}' and '{seq}' are the supported placeholders;
                    # a missing seq_num is rendered as "0".
                    line = options['pattern']
                    line = line.replace('{code}', c.code)
                    line = line.replace('{seq}', str(c.seq_num) if c.seq_num else "0")
                    fn.write(line + "\n")
                    done += 1
                    if done % 10000 == 0:
                        perc = done * 100.0 / total
                        print("[%d/%d %.1f%%]" % (done, total, perc))
                        job.update('running', perc)
            print("all done, exported %d codes" % total)
            job.update('done', 100.0)
        except Exception as e:
            job.update('error', message=str(e))

View File

@ -0,0 +1,67 @@
import time
import shutil
import tempfile
import subprocess
import requests
import datetime
import os
from django.utils import timezone
from django.core.management.base import BaseCommand, CommandError
from products.models import *
from products.aliyun import oss_sign_url
from django.core import serializers
class Command(BaseCommand):
    """Export recent ScanData records (serialized JSON plus their downloaded
    images) into a zip archive, reporting progress through a Job."""
    help = 'Export scan data to a zip file'

    def add_arguments(self, parser):
        parser.add_argument('--output', '-o', required=True, help="output file")
        parser.add_argument('--job', '-j', type=int, help="The job object to report progress")
        parser.add_argument('--hours', '-H', type=int, help="how many hours")

    def download(self, url, p):
        """Download `url` into local path `p`, creating parent directories."""
        r = requests.get(url, allow_redirects=True)
        d = os.path.dirname(p)
        os.makedirs(d, exist_ok=True)
        with open(p, 'wb') as f:
            f.write(r.content)

    def do_export(self, job, workdir, hours, part_prog):
        """Fetch images and serialize all ScanData newer than `hours` ago
        into `workdir`; progress advances up to `part_prog` percent."""
        # Use os.makedirs instead of shelling out to `mkdir -p`.
        os.makedirs(os.path.join(workdir, 'images'), exist_ok=True)
        start = timezone.now() - datetime.timedelta(hours=hours)
        records = ScanData.objects.filter(datetime__gt=start)
        # +1 keeps the denominator non-zero when there are no records.
        total = records.count() + 1
        done = 0
        print("total:", total)
        for r in records:
            url = oss_sign_url(r.image)
            p = os.path.join(workdir, 'images', r.image)
            done += 1
            self.download(url, p)
            perc = done * part_prog / total
            print("[%d/%d %.1f%%]" % (done, total, perc))
            job.update('running', perc)
        j = serializers.serialize('json', records)
        # BUGFIX: removed leftover debug `print(j)` that dumped the entire
        # serialized dataset to stdout on every export.
        with open(os.path.join(workdir, 'data.json'), 'w') as f:
            f.write(j)

    def handle(self, *args, **options):
        if options.get('job'):
            job = Job.objects.get(pk=options['job'])
        else:
            job = Job.objects.create(name='export-scan-data.%d' % time.time())
        td = None
        try:
            job.update('running', 0.0)
            td = tempfile.mkdtemp()
            # Export fills the first 80% of the progress bar; zipping the rest.
            self.do_export(job, td, options['hours'], 80)
            cmd = ['zip', '-r', os.path.abspath(options['output']), '.']
            print(cmd)
            subprocess.check_call(cmd, cwd=td)
            job.update('done', 100.0)
        except Exception as e:
            job.update('error', message=str(e))
        finally:
            # Always clean up the temporary working directory.
            if td:
                shutil.rmtree(td)

View File

@ -0,0 +1,59 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
class Command(BaseCommand):
    """Import serial codes from a text file into an existing CodeBatch,
    allocating per-tenant sequence numbers and reporting progress."""
    help = 'Import serial code from text file to a batch'

    def add_arguments(self, parser):
        parser.add_argument('--batch', '-b', required=True, type=int)
        parser.add_argument('--file', '-f', required=True)
        parser.add_argument('--job', '-j', type=int)

    def file_batch_iter(self, path, batchsize):
        """Yield lists of up to `batchsize` stripped code lines from `path`,
        skipping '#' comment lines."""
        this_batch = []
        # BUGFIX: open via a context manager so the handle is closed
        # (previously the file object was iterated and never closed).
        with open(path, 'r') as f:
            for x in f:
                x = x.strip()
                if x.startswith('#'):
                    continue
                this_batch.append(x)
                if len(this_batch) >= batchsize:
                    yield this_batch
                    this_batch = []
        # BUGFIX: only yield a trailing partial batch when non-empty; the
        # old unconditional yield produced an empty batch whenever the file
        # length was a multiple of batchsize, which made handle() allocate
        # `batchsize` sequence numbers for zero codes.
        if this_batch:
            yield this_batch

    def handle(self, *args, **options):
        # Local import: `os` was used below without being imported in this
        # module (it previously relied on a wildcard models import).
        import os
        if options.get('job'):
            job = Job.objects.get(pk=options['job'])
        else:
            job = Job.objects.create(name='import-code.%d' % options.get('batch'))
        fn = None
        try:
            job.update('running', 0.0)
            fn = options['file']
            batchsize = 10000
            # First pass just counts codes so progress can be reported.
            total = sum(len(x) for x in self.file_batch_iter(fn, batchsize))
            done = 0
            b = CodeBatch.objects.get(pk=options['batch'])
            t = b.tenant
            for lines in self.file_batch_iter(fn, batchsize):
                objs = []
                if t:
                    # Reserve a contiguous run of per-tenant sequence numbers.
                    seq_num = t.alloc_seq_nums(batchsize)
                else:
                    seq_num = None
                for code in lines:
                    obj = SerialCode(code=code, batch=b, tenant=t, seq_num=seq_num, is_active=b.is_active)
                    if seq_num:
                        seq_num += 1
                    objs.append(obj)
                done += len(objs)
                perc = done * 100.0 / total
                # ignore_conflicts: silently skip codes that already exist.
                SerialCode.objects.bulk_create(objs, ignore_conflicts=True)
                print("[%d / %d]" % (done, total))
                job.update('running', perc)
            job.update('done', 100.0)
        except Exception as e:
            print(e)
            job.update('error', message=str(e))
        finally:
            # BUGFIX: guard against `fn` never having been assigned (e.g. if
            # the initial job.update raised) before deleting the input file.
            if fn:
                print("deleting", fn)
                os.unlink(fn)

View File

@ -0,0 +1,59 @@
import json
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from products.sendmsg import send_user_message
from products.models import *
class Command(BaseCommand):
    """Detect repeated verification of a single serial code against each
    admin's configured alert rule; alert the admin and freeze the code."""
    help = 'Check repeated QR verify request and send alert'

    def add_arguments(self, parser):
        parser.add_argument('code')

    def handle(self, *args, **options):
        code = options['code']
        sc = SerialCode.objects.filter(code=code, is_active=True).first()
        if not sc:
            # Unknown or already-inactive code: nothing to check.
            return
        trigger = False
        # Evaluate every admin that has an alert rule configured.
        for a in AdminInfo.objects.filter(qr_verify_alert_rule__isnull=False):
            if self.check_one(a, sc):
                trigger = True
        if trigger:
            # Freeze the code once any rule fired.
            self.disable_code(sc)

    def check_one(self, admin, sc):
        """Return True (after alerting `admin`) when `sc` was verified at
        least `repeat_threshold` times inside the rule's time window;
        returns None (falsy) otherwise."""
        # BUGFIX: `datetime` was referenced below without being imported in
        # this module; import it locally.
        import datetime
        rule = admin.qr_verify_alert_rule
        if not rule:
            return
        rule = json.loads(rule)
        time_window_seconds = rule.get('time_window_seconds')
        repeat_threshold = rule.get('repeat_threshold')
        if not time_window_seconds or not repeat_threshold:
            # Malformed or incomplete rule: treat as "not triggered".
            return
        start = timezone.now() - datetime.timedelta(seconds=time_window_seconds)
        records = ScanData.objects.filter(code=sc.code, datetime__gte=start)
        if records.count() >= repeat_threshold:
            self.alert_one(admin, sc, records)
            return True

    def disable_code(self, sc):
        """Deactivate the serial code so further verifications fail."""
        sc.is_active = False
        sc.save()

    def alert_one(self, admin, sc, records):
        """Send a repeat-verification alert message to `admin` listing the
        recent scan records for the code."""
        subject = "重复验证报警: 序列码 %s 最近已重复 %d 次验证" % (sc.code, records.count())
        print(subject)
        lines = [
            "序列码: %s" % sc.code,
            "租户: %s" % (sc.tenant.username if sc.tenant else ""),
            "产品: %s" % (sc.product.name if sc.product else ""),
            "",
            "近期验证记录:"]
        for r in records:
            lines.append("%s, %s, %s" % (r.datetime, r.location, r.ip))
        lines.append("")
        lines.append("验证码已冻结")
        send_user_message(admin, subject, "\n".join(lines))

View File

@ -0,0 +1,11 @@
from django.core.management.base import BaseCommand, CommandError
from euphon import send_alert
class Command(BaseCommand):
    """Management command that forwards a message to the euphon ops channel."""
    help = 'Send alert message to euphon ops channel'

    def add_arguments(self, parser):
        # One or more message fragments; they are joined with newlines.
        parser.add_argument('msg', nargs="+")

    def handle(self, *args, **options):
        message = "\n".join(options['msg'])
        send_alert(message)

View File

@ -0,0 +1,15 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
from products.sendmsg import send_email
from django.db import transaction
class Command(BaseCommand):
    """Management command that sends a plain email via products.sendmsg."""
    help = 'Send a message to admin/tenant'

    def add_arguments(self, parser):
        # Positional arguments: recipient address, subject line, body text.
        for name in ('to', 'subject', 'content'):
            parser.add_argument(name)

    def handle(self, *args, **options):
        send_email(options['to'], options['subject'], options['content'])

View File

@ -0,0 +1,31 @@
from django.core.management.base import BaseCommand, CommandError
from products.models import *
from products.sendmsg import send_user_message, admin_broadcast
from django.db import transaction
class Command(BaseCommand):
    """Send a subject/content message to all admins, a single admin, or a
    tenant, using products.sendmsg helpers."""
    help = 'Send a message to admin/tenant'

    def add_arguments(self, parser):
        parser.add_argument('--all-admin', '-A', action="store_true")
        parser.add_argument('--admin', '-a')
        parser.add_argument('--tenant', '-t')
        parser.add_argument('--subject', '-s', required=True)
        parser.add_argument('--content', '-c', default='')

    def handle(self, *args, **options):
        subject = options['subject']
        content = options['content']
        # Broadcast takes precedence over individual recipients.
        if options.get('all_admin'):
            admin_broadcast(subject, content)
            return
        admin_name = options['admin']
        tenant_name = options['tenant']
        if admin_name:
            recipient = AdminInfo.objects.get(user__username=admin_name)
        elif tenant_name:
            recipient = Tenant.objects.get(username=tenant_name)
        else:
            raise Exception("Must specify either admin or tenant name")
        send_user_message(recipient, subject, content)

View File

@ -0,0 +1,12 @@
# Generated by Django 3.2.11 on 2022-02-04 22:03
# Auto-generated no-op initial migration (no dependencies, no operations).
# Do not hand-edit generated migrations.
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
]

View File

@ -0,0 +1,146 @@
# Generated by Django 3.2.11 on 2022-02-05 11:33
# Auto-generated migration: creates the initial product-domain models
# (CodeBatch, ConsumerInfo, Event, PageTemplate, Stat, Tenant, SerialCode,
# ScanData, ProductPage, Product, Media, AdminInfo) and wires their foreign
# keys. Do not hand-edit generated migrations.
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('products', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CodeBatch',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('qr_angle', models.FloatField(default=0.0)),
('datetime', models.DateTimeField(auto_now=True)),
('code_prefix', models.CharField(max_length=64)),
],
),
migrations.CreateModel(
name='ConsumerInfo',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateTimeField(auto_now=True)),
('username', models.CharField(max_length=128)),
('gender', models.CharField(blank=True, max_length=128, null=True)),
('country', models.CharField(blank=True, max_length=128, null=True)),
('province', models.CharField(blank=True, max_length=128, null=True)),
('city', models.CharField(blank=True, max_length=128, null=True)),
],
),
migrations.CreateModel(
name='Event',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime', models.DateTimeField(auto_now=True)),
('kind', models.CharField(max_length=128)),
('params', models.TextField()),
],
),
migrations.CreateModel(
name='PageTemplate',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('template', models.TextField()),
('params', models.TextField()),
],
),
migrations.CreateModel(
name='Stat',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('kind', models.CharField(max_length=128)),
('params', models.TextField()),
('count', models.IntegerField(default=1)),
],
),
migrations.CreateModel(
name='Tenant',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(db_index=True, max_length=128, unique=True, verbose_name='用户名')),
('mobile', models.CharField(blank=True, max_length=128, null=True, unique=True, verbose_name='手机号')),
('password', models.CharField(max_length=256, verbose_name='密码')),
('token', models.CharField(db_index=True, max_length=128, unique=True, verbose_name='API Token')),
],
),
migrations.CreateModel(
name='SerialCode',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('code', models.CharField(db_index=True, max_length=128, unique=True)),
('batch', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='codes', to='products.codebatch')),
],
),
migrations.CreateModel(
name='ScanData',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime', models.DateTimeField(auto_now=True)),
('ip', models.CharField(max_length=64)),
('kind', models.CharField(max_length=128)),
('params', models.TextField()),
('image', models.TextField()),
('consumer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scans', to='products.consumerinfo')),
],
),
migrations.CreateModel(
name='ProductPage',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('html', models.TextField(default='<html><body>no content</body></html>')),
('arguments', models.TextField(blank=True, null=True)),
('template', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='pages', to='products.pagetemplate')),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=128, verbose_name='名称')),
('description', models.TextField(verbose_name='产品描述')),
('counterfeit_result_page', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='counterfeit_page', to='products.productpage', verbose_name='假货页面')),
('genuine_result_page', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='genuine_page', to='products.productpage', verbose_name='产品信息页面')),
('tenant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='products', to='products.tenant')),
],
options={
'unique_together': {('tenant', 'name')},
},
),
migrations.AddField(
model_name='pagetemplate',
name='tenant',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='templates', to='products.tenant'),
),
migrations.CreateModel(
name='Media',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mime_type', models.CharField(max_length=128)),
('uri', models.TextField()),
('tenant', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='media', to='products.tenant')),
],
),
migrations.AddField(
model_name='codebatch',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='batches', to='products.product'),
),
migrations.CreateModel(
name='AdminInfo',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('token', models.CharField(db_index=True, max_length=128, null=True, unique=True, verbose_name='API Token')),
('mobile', models.CharField(blank=True, max_length=128, null=True, unique=True, verbose_name='手机号')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='token', to=settings.AUTH_USER_MODEL)),
],
),
]

View File

@ -0,0 +1,44 @@
# Generated by Django 3.2.11 on 2022-02-09 20:25
# Auto-generated migration: adds PageTemplate.name, adds Chinese
# verbose_names on CodeBatch fields, and changes the ProductPage.html
# default. Do not hand-edit generated migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0002_initial'),
]
operations = [
migrations.AddField(
model_name='pagetemplate',
name='name',
field=models.CharField(blank=True, db_index=True, max_length=128, null=True),
),
migrations.AlterField(
model_name='codebatch',
name='code_prefix',
field=models.CharField(max_length=64, verbose_name='序列码前缀'),
),
migrations.AlterField(
model_name='codebatch',
name='datetime',
field=models.DateTimeField(auto_now=True, verbose_name='创建日期'),
),
migrations.AlterField(
model_name='codebatch',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='batches', to='products.product', verbose_name='产品'),
),
migrations.AlterField(
model_name='codebatch',
name='qr_angle',
field=models.FloatField(default=0.0, verbose_name='网线角度'),
),
migrations.AlterField(
model_name='productpage',
name='html',
field=models.TextField(default='<span class="no-content">no content</span>'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-02-11 21:11
# Auto-generated migration: adds Tenant.password_reset_code.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0003_auto_20220209_2025'),
]
operations = [
migrations.AddField(
model_name='tenant',
name='password_reset_code',
field=models.CharField(blank=True, db_index=True, max_length=128, null=True),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-02-11 21:21
# Auto-generated migration: adds Tenant.password_reset_code_expire.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0004_tenant_password_reset_code'),
]
operations = [
migrations.AddField(
model_name='tenant',
name='password_reset_code_expire',
field=models.DateTimeField(blank=True, null=True),
),
]

View File

@ -0,0 +1,21 @@
# Generated by Django 3.2.11 on 2022-02-23 20:41
# Auto-generated migration: creates the SystemLog model.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0005_tenant_password_reset_code_expire'),
]
operations = [
migrations.CreateModel(
name='SystemLog',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime', models.DateTimeField(auto_now=True)),
('log', models.TextField()),
],
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-02-23 21:01
# Auto-generated migration: adds a db_index to SystemLog.datetime.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0006_systemlog'),
]
operations = [
migrations.AlterField(
model_name='systemlog',
name='datetime',
field=models.DateTimeField(auto_now=True, db_index=True),
),
]

View File

@ -0,0 +1,23 @@
# Generated by Django 3.2.12 on 2022-02-24 21:25
# Auto-generated migration: creates the Counter model (unique name).
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0007_alter_systemlog_datetime'),
]
operations = [
migrations.CreateModel(
name='Counter',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=128, unique=True)),
('params', models.TextField(null=True)),
('datetime', models.DateTimeField(auto_now=True)),
('count', models.IntegerField()),
],
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.12 on 2022-02-24 21:40
# Auto-generated migration: drops the unique constraint on Counter.name.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0008_counter'),
]
operations = [
migrations.AlterField(
model_name='counter',
name='name',
field=models.CharField(db_index=True, max_length=128),
),
]

View File

@ -0,0 +1,27 @@
# Generated by Django 3.2.12 on 2022-03-03 20:39
# Auto-generated migration: adds ConsumerInfo.platform, gives
# ConsumerInfo.username a default, and makes (platform, username) unique
# together. Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0009_alter_counter_name'),
]
operations = [
migrations.AddField(
model_name='consumerinfo',
name='platform',
field=models.CharField(default='wechat', max_length=128),
),
migrations.AlterField(
model_name='consumerinfo',
name='username',
field=models.CharField(default='[匿名用户]', max_length=128),
),
migrations.AlterUniqueTogether(
name='consumerinfo',
unique_together={('platform', 'username')},
),
]

View File

@ -0,0 +1,34 @@
# Generated by Django 3.2.11 on 2022-03-06 15:52
# Auto-generated migration: adds ScanData.product FK and updates
# verbose_names on PageTemplate.name, Product.description and
# SerialCode.code. Do not hand-edit generated migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0010_auto_20220303_2039'),
]
operations = [
migrations.AddField(
model_name='scandata',
name='product',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='scans', to='products.product'),
),
migrations.AlterField(
model_name='pagetemplate',
name='name',
field=models.CharField(blank=True, db_index=True, max_length=128, null=True, verbose_name='名称'),
),
migrations.AlterField(
model_name='product',
name='description',
field=models.TextField(verbose_name='备注'),
),
migrations.AlterField(
model_name='serialcode',
name='code',
field=models.CharField(db_index=True, max_length=128, unique=True, verbose_name='序列码'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-06 15:56
# Auto-generated migration: changes the ProductPage.html default text.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0011_auto_20220306_1552'),
]
operations = [
migrations.AlterField(
model_name='productpage',
name='html',
field=models.TextField(default='暂无内容'),
),
]

View File

@ -0,0 +1,26 @@
# Generated by Django 3.2.11 on 2022-03-06 19:38
# Auto-generated migration: creates AssetFile and deletes Media.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0012_alter_productpage_html'),
]
operations = [
migrations.CreateModel(
name='AssetFile',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filename', models.TextField()),
('data', models.BinaryField()),
('mime_type', models.CharField(default='application/octet-stream', max_length=256)),
('usage', models.CharField(max_length=256)),
],
),
migrations.DeleteModel(
name='Media',
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-06 19:58
# Auto-generated migration: adds AssetFile.properties (default '').
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0013_auto_20220306_1938'),
]
operations = [
migrations.AddField(
model_name='assetfile',
name='properties',
field=models.TextField(default=''),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-06 20:28
# Auto-generated migration: makes AssetFile.properties nullable.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0014_assetfile_properties'),
]
operations = [
migrations.AlterField(
model_name='assetfile',
name='properties',
field=models.TextField(null=True),
),
]

View File

@ -0,0 +1,23 @@
# Generated by Django 3.2.11 on 2022-03-07 20:01
# Auto-generated migration: relaxes ConsumerInfo.platform/username to be
# blank/nullable. Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0015_alter_assetfile_properties'),
]
operations = [
migrations.AlterField(
model_name='consumerinfo',
name='platform',
field=models.CharField(blank=True, default='wechat', max_length=128, null=True),
),
migrations.AlterField(
model_name='consumerinfo',
name='username',
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='用户名'),
),
]

View File

@ -0,0 +1,36 @@
# Generated by Django 3.2.11 on 2022-03-09 19:37
# Auto-generated migration: deletes Event and Stat, removes
# ConsumerInfo.city/country/province, and adds ConsumerInfo.ip.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0016_auto_20220307_2001'),
]
operations = [
migrations.DeleteModel(
name='Event',
),
migrations.DeleteModel(
name='Stat',
),
migrations.RemoveField(
model_name='consumerinfo',
name='city',
),
migrations.RemoveField(
model_name='consumerinfo',
name='country',
),
migrations.RemoveField(
model_name='consumerinfo',
name='province',
),
migrations.AddField(
model_name='consumerinfo',
name='ip',
field=models.CharField(blank=True, max_length=128, null=True),
),
]

View File

@ -0,0 +1,23 @@
# Generated by Django 3.2.11 on 2022-03-09 20:24
# Auto-generated migration: adds Counter.accumulated and indexes
# Counter.datetime. Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0017_auto_20220309_1937'),
]
operations = [
migrations.AddField(
model_name='counter',
name='accumulated',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='counter',
name='datetime',
field=models.DateTimeField(auto_now=True, db_index=True),
),
]

View File

@ -0,0 +1,17 @@
# Generated by Django 3.2.11 on 2022-03-09 20:34
# Auto-generated migration: removes Counter.accumulated (reverts 0018's
# AddField). Do not hand-edit generated migrations.
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('products', '0018_auto_20220309_2024'),
]
operations = [
migrations.RemoveField(
model_name='counter',
name='accumulated',
),
]

View File

@ -0,0 +1,38 @@
# Generated by Django 3.2.11 on 2022-03-09 20:38
# Auto-generated migration: switches several timestamp fields from
# auto_now to auto_now_add (set once at creation instead of on every save).
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0019_remove_counter_accumulated'),
]
operations = [
migrations.AlterField(
model_name='codebatch',
name='datetime',
field=models.DateTimeField(auto_now_add=True, verbose_name='创建日期'),
),
migrations.AlterField(
model_name='consumerinfo',
name='create_time',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='counter',
name='datetime',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
migrations.AlterField(
model_name='scandata',
name='datetime',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='systemlog',
name='datetime',
field=models.DateTimeField(auto_now_add=True, db_index=True),
),
]

View File

@ -0,0 +1,24 @@
# Generated by Django 3.2.11 on 2022-03-12 22:38
# Auto-generated migration: adds CodeBatch.description and makes
# CodeBatch.product nullable. Do not hand-edit generated migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0020_auto_20220309_2038'),
]
operations = [
migrations.AddField(
model_name='codebatch',
name='description',
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name='codebatch',
name='product',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='batches', to='products.product', verbose_name='产品'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-12 22:40
# Auto-generated migration: adds CodeBatch.is_active (default True).
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0021_auto_20220312_2238'),
]
operations = [
migrations.AddField(
model_name='codebatch',
name='is_active',
field=models.BooleanField(default=True),
),
]

View File

@ -0,0 +1,19 @@
# Generated by Django 3.2.11 on 2022-03-13 08:31
# Auto-generated migration: adds a nullable CodeBatch.tenant FK.
# Do not hand-edit generated migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0022_codebatch_is_active'),
]
operations = [
migrations.AddField(
model_name='codebatch',
name='tenant',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='codebatches', to='products.tenant'),
),
]

View File

@ -0,0 +1,45 @@
# Generated by Django 3.2.11 on 2022-03-15 08:17
# Auto-generated migration: creates GlobalConfig, removes
# ConsumerInfo.gender, adds Tenant.welcome_page_config, and adds Chinese
# verbose_names on several fields. Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0023_codebatch_tenant'),
]
operations = [
migrations.CreateModel(
name='GlobalConfig',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=128, unique=True, verbose_name='配置名称')),
('value', models.TextField(null=True, verbose_name='配置内容')),
],
),
migrations.RemoveField(
model_name='consumerinfo',
name='gender',
),
migrations.AddField(
model_name='tenant',
name='welcome_page_config',
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name='codebatch',
name='description',
field=models.TextField(blank=True, null=True, verbose_name='备注'),
),
migrations.AlterField(
model_name='codebatch',
name='is_active',
field=models.BooleanField(default=True, verbose_name='已激活'),
),
migrations.AlterField(
model_name='consumerinfo',
name='ip',
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='IP'),
),
]

View File

@ -0,0 +1,28 @@
# Generated by Django 3.2.11 on 2022-03-15 20:39
# Auto-generated migration: removes AssetFile.data (binary blob) and adds
# AssetFile.tenant FK and AssetFile.uuid. Do not hand-edit generated
# migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0024_auto_20220315_0817'),
]
operations = [
migrations.RemoveField(
model_name='assetfile',
name='data',
),
migrations.AddField(
model_name='assetfile',
name='tenant',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='assets', to='products.tenant'),
),
migrations.AddField(
model_name='assetfile',
name='uuid',
field=models.CharField(blank=True, max_length=256, null=True),
),
]

View File

@ -0,0 +1,19 @@
# Generated by Django 3.2.11 on 2022-03-15 20:42
# Auto-generated migration: makes AssetFile.uuid non-null (one-off default
# '' used only for existing rows). Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0025_auto_20220315_2039'),
]
operations = [
migrations.AlterField(
model_name='assetfile',
name='uuid',
field=models.CharField(default='', max_length=256),
preserve_default=False,
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-15 22:18
# Auto-generated migration: adds Tenant.display_name.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0026_alter_assetfile_uuid'),
]
operations = [
migrations.AddField(
model_name='tenant',
name='display_name',
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='显示名称'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-19 21:29
# Auto-generated migration: adds AssetFile.url.
# Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0027_tenant_display_name'),
]
operations = [
migrations.AddField(
model_name='assetfile',
name='url',
field=models.TextField(blank=True, null=True),
),
]

View File

@ -0,0 +1,23 @@
# Generated by Django 3.2.11 on 2022-03-21 19:45
# Auto-generated migration: creates the Article model (tenant-scoped).
# Do not hand-edit generated migrations.
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('products', '0028_assetfile_url'),
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(blank=True, max_length=128, null=True, verbose_name='标题')),
('body', models.TextField()),
('tenant', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='articles', to='products.tenant')),
],
),
]

View File

@ -0,0 +1,30 @@
# Generated by Django 3.2.11 on 2022-03-23 08:09
# Auto-generated migration: adds AssetFile.datetime (auto_now_add; one-off
# default for existing rows) and verbose_names on AssetFile.filename and
# ScanData.datetime. Do not hand-edit generated migrations.
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('products', '0029_article'),
]
operations = [
migrations.AddField(
model_name='assetfile',
name='datetime',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, verbose_name='创建日期'),
preserve_default=False,
),
migrations.AlterField(
model_name='assetfile',
name='filename',
field=models.TextField(verbose_name='文件名'),
),
migrations.AlterField(
model_name='scandata',
name='datetime',
field=models.DateTimeField(auto_now_add=True, verbose_name='时间'),
),
]

View File

@ -0,0 +1,23 @@
# Generated by Django 3.2.11 on 2022-03-23 08:41
# Auto-generated migration: adds AssetFile.title and a verbose_name on
# AssetFile.url. Do not hand-edit generated migrations.
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0030_auto_20220323_0809'),
]
operations = [
migrations.AddField(
model_name='assetfile',
name='title',
field=models.TextField(blank=True, null=True, verbose_name='标题'),
),
migrations.AlterField(
model_name='assetfile',
name='url',
field=models.TextField(blank=True, null=True, verbose_name='链接'),
),
]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-23 09:17
from django.db import migrations, models


class Migration(migrations.Migration):
    """0032: add an optional integer ``order`` to AssetFile (default 1)."""

    dependencies = [
        ('products', '0031_auto_20220323_0841'),
    ]

    operations = [
        migrations.AddField(
            model_name='assetfile',
            name='order',
            field=models.IntegerField(blank=True, default=1, null=True, verbose_name='顺序'),
        ),
    ]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-23 09:18
from django.db import migrations


class Migration(migrations.Migration):
    """0033: rename AssetFile.url to AssetFile.link (column rename only)."""

    dependencies = [
        ('products', '0032_assetfile_order'),
    ]

    operations = [
        migrations.RenameField(
            model_name='assetfile',
            old_name='url',
            new_name='link',
        ),
    ]

View File

@ -0,0 +1,22 @@
# Generated by Django 3.2.11 on 2022-03-23 20:15
from django.db import migrations, models


class Migration(migrations.Migration):
    """0034: replace AssetFile.order with AssetFile.position.

    NOTE(review): makemigrations detected this as remove+add rather than a
    rename, so any values stored in ``order`` are dropped — presumably
    intentional; confirm no data needed to survive this migration.
    """

    dependencies = [
        ('products', '0033_rename_url_assetfile_link'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='assetfile',
            name='order',
        ),
        migrations.AddField(
            model_name='assetfile',
            name='position',
            field=models.IntegerField(blank=True, default=1, null=True, verbose_name='位置'),
        ),
    ]

View File

@ -0,0 +1,18 @@
# Generated by Django 3.2.11 on 2022-03-23 20:47
from django.db import migrations, models


class Migration(migrations.Migration):
    """0035: add an optional ``external_url`` text column to AssetFile."""

    dependencies = [
        ('products', '0034_auto_20220323_2015'),
    ]

    operations = [
        migrations.AddField(
            model_name='assetfile',
            name='external_url',
            field=models.TextField(blank=True, null=True),
        ),
    ]

View File

@ -0,0 +1,22 @@
# Generated by Django 3.2.11 on 2022-03-23 21:54
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """0036: create MiniProgramContent, one content blob per tenant."""

    dependencies = [
        ('products', '0035_assetfile_external_url'),
    ]

    operations = [
        migrations.CreateModel(
            name='MiniProgramContent',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                # NOTE(review): unique=True on a ForeignKey triggers Django
                # warning W342 — the model should use OneToOneField instead.
                # Left as-is here: editing an applied migration would desync
                # it from the recorded migration history; fix in the model
                # and generate a new migration if desired.
                ('tenant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='mini_program_content', to='products.tenant', unique=True)),
            ],
        ),
    ]

Some files were not shown because too many files have changed in this diff Show More