init project
This commit is contained in:
commit
2b2bac9432
19
.gitignore
vendored
Normal file
19
.gitignore
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
.rebar3
|
||||
_*
|
||||
.eunit
|
||||
*.o
|
||||
*.beam
|
||||
*.plt
|
||||
*.swp
|
||||
*.swo
|
||||
.erlang.cookie
|
||||
ebin
|
||||
log
|
||||
erl_crash.dump
|
||||
.rebar
|
||||
logs
|
||||
.idea
|
||||
*.iml
|
||||
rebar3.crashdump
|
||||
*~
|
||||
config/sys.config
|
||||
12
Dockerfile
Normal file
12
Dockerfile
Normal file
@ -0,0 +1,12 @@
|
||||
FROM erlang:25.3
|
||||
|
||||
RUN mkdir -p /usr/local/var/mnesia/
|
||||
|
||||
ADD _build/default/rel/iot/iot-0.1.0.tar.gz /data/iot/
|
||||
|
||||
VOLUME /data/iot/log/
|
||||
VOLUME /usr/local/var/mnesia/
|
||||
|
||||
WORKDIR /data/iot
|
||||
|
||||
CMD /data/iot/bin/iot foreground
|
||||
191
LICENSE
Normal file
191
LICENSE
Normal file
@ -0,0 +1,191 @@
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
Copyright 2023, 安礼成 <licheng5@staff.weibo.com>.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
12
README.md
Normal file
12
README.md
Normal file
@ -0,0 +1,12 @@
|
||||
iot
|
||||
=====
|
||||
|
||||
An OTP application
|
||||
|
||||
## erlang client sdk
|
||||
https://github.com/emqx/emqtt
|
||||
|
||||
Build
|
||||
-----
|
||||
|
||||
$ rebar3 compile
|
||||
535
apps/iot/include/emqtt.hrl
Normal file
535
apps/iot/include/emqtt.hrl
Normal file
@ -0,0 +1,535 @@
|
||||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-ifndef(EMQTT_HRL).
|
||||
-define(EMQTT_HRL, true).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Protocol Version and Names
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(MQTT_PROTO_V3, 3).
|
||||
-define(MQTT_PROTO_V4, 4).
|
||||
-define(MQTT_PROTO_V5, 5).
|
||||
|
||||
-define(PROTOCOL_NAMES, [
|
||||
{?MQTT_PROTO_V3, <<"MQIsdp">>},
|
||||
{?MQTT_PROTO_V4, <<"MQTT">>},
|
||||
{?MQTT_PROTO_V5, <<"MQTT">>}]).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT QoS Levels
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(QOS_0, 0). %% At most once
|
||||
-define(QOS_1, 1). %% At least once
|
||||
-define(QOS_2, 2). %% Exactly once
|
||||
|
||||
-define(IS_QOS(I), (I >= ?QOS_0 andalso I =< ?QOS_2)).
|
||||
|
||||
-define(QOS_I(Name),
|
||||
begin
|
||||
(case Name of
|
||||
?QOS_0 -> ?QOS_0;
|
||||
qos0 -> ?QOS_0;
|
||||
at_most_once -> ?QOS_0;
|
||||
?QOS_1 -> ?QOS_1;
|
||||
qos1 -> ?QOS_1;
|
||||
at_least_once -> ?QOS_1;
|
||||
?QOS_2 -> ?QOS_2;
|
||||
qos2 -> ?QOS_2;
|
||||
exactly_once -> ?QOS_2
|
||||
end)
|
||||
end).
|
||||
|
||||
-define(IS_QOS_NAME(I),
|
||||
(I =:= qos0 orelse I =:= at_most_once orelse
|
||||
I =:= qos1 orelse I =:= at_least_once orelse
|
||||
I =:= qos2 orelse I =:= exactly_once)).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Maximum ClientId Length.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(MAX_CLIENTID_LEN, 65535).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Control Packet Types
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(RESERVED, 0). %% Reserved
|
||||
-define(CONNECT, 1). %% Client request to connect to Server
|
||||
-define(CONNACK, 2). %% Server to Client: Connect acknowledgment
|
||||
-define(PUBLISH, 3). %% Publish message
|
||||
-define(PUBACK, 4). %% Publish acknowledgment
|
||||
-define(PUBREC, 5). %% Publish received (assured delivery part 1)
|
||||
-define(PUBREL, 6). %% Publish release (assured delivery part 2)
|
||||
-define(PUBCOMP, 7). %% Publish complete (assured delivery part 3)
|
||||
-define(SUBSCRIBE, 8). %% Client subscribe request
|
||||
-define(SUBACK, 9). %% Server Subscribe acknowledgment
|
||||
-define(UNSUBSCRIBE, 10). %% Unsubscribe request
|
||||
-define(UNSUBACK, 11). %% Unsubscribe acknowledgment
|
||||
-define(PINGREQ, 12). %% PING request
|
||||
-define(PINGRESP, 13). %% PING response
|
||||
-define(DISCONNECT, 14). %% Client or Server is disconnecting
|
||||
-define(AUTH, 15). %% Authentication exchange
|
||||
|
||||
-define(TYPE_NAMES, [
|
||||
'CONNECT',
|
||||
'CONNACK',
|
||||
'PUBLISH',
|
||||
'PUBACK',
|
||||
'PUBREC',
|
||||
'PUBREL',
|
||||
'PUBCOMP',
|
||||
'SUBSCRIBE',
|
||||
'SUBACK',
|
||||
'UNSUBSCRIBE',
|
||||
'UNSUBACK',
|
||||
'PINGREQ',
|
||||
'PINGRESP',
|
||||
'DISCONNECT',
|
||||
'AUTH']).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT V3.1.1 Connect Return Codes
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(CONNACK_ACCEPT, 0). %% Connection accepted
|
||||
-define(CONNACK_PROTO_VER, 1). %% Unacceptable protocol version
|
||||
-define(CONNACK_INVALID_ID, 2). %% Client Identifier is correct UTF-8 but not allowed by the Server
|
||||
-define(CONNACK_SERVER, 3). %% Server unavailable
|
||||
-define(CONNACK_CREDENTIALS, 4). %% Username or password is malformed
|
||||
-define(CONNACK_AUTH, 5). %% Client is not authorized to connect
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT V5.0 Reason Codes
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(RC_SUCCESS, 16#00).
|
||||
-define(RC_NORMAL_DISCONNECTION, 16#00).
|
||||
-define(RC_GRANTED_QOS_0, 16#00).
|
||||
-define(RC_GRANTED_QOS_1, 16#01).
|
||||
-define(RC_GRANTED_QOS_2, 16#02).
|
||||
-define(RC_DISCONNECT_WITH_WILL_MESSAGE, 16#04).
|
||||
-define(RC_NO_MATCHING_SUBSCRIBERS, 16#10).
|
||||
-define(RC_NO_SUBSCRIPTION_EXISTED, 16#11).
|
||||
-define(RC_CONTINUE_AUTHENTICATION, 16#18).
|
||||
-define(RC_RE_AUTHENTICATE, 16#19).
|
||||
-define(RC_UNSPECIFIED_ERROR, 16#80).
|
||||
-define(RC_MALFORMED_PACKET, 16#81).
|
||||
-define(RC_PROTOCOL_ERROR, 16#82).
|
||||
-define(RC_IMPLEMENTATION_SPECIFIC_ERROR, 16#83).
|
||||
-define(RC_UNSUPPORTED_PROTOCOL_VERSION, 16#84).
|
||||
-define(RC_CLIENT_IDENTIFIER_NOT_VALID, 16#85).
|
||||
-define(RC_BAD_USER_NAME_OR_PASSWORD, 16#86).
|
||||
-define(RC_NOT_AUTHORIZED, 16#87).
|
||||
-define(RC_SERVER_UNAVAILABLE, 16#88).
|
||||
-define(RC_SERVER_BUSY, 16#89).
|
||||
-define(RC_BANNED, 16#8A).
|
||||
-define(RC_SERVER_SHUTTING_DOWN, 16#8B).
|
||||
-define(RC_BAD_AUTHENTICATION_METHOD, 16#8C).
|
||||
-define(RC_KEEP_ALIVE_TIMEOUT, 16#8D).
|
||||
-define(RC_SESSION_TAKEN_OVER, 16#8E).
|
||||
-define(RC_TOPIC_FILTER_INVALID, 16#8F).
|
||||
-define(RC_TOPIC_NAME_INVALID, 16#90).
|
||||
-define(RC_PACKET_IDENTIFIER_IN_USE, 16#91).
|
||||
-define(RC_PACKET_IDENTIFIER_NOT_FOUND, 16#92).
|
||||
-define(RC_RECEIVE_MAXIMUM_EXCEEDED, 16#93).
|
||||
-define(RC_TOPIC_ALIAS_INVALID, 16#94).
|
||||
-define(RC_PACKET_TOO_LARGE, 16#95).
|
||||
-define(RC_MESSAGE_RATE_TOO_HIGH, 16#96).
|
||||
-define(RC_QUOTA_EXCEEDED, 16#97).
|
||||
-define(RC_ADMINISTRATIVE_ACTION, 16#98).
|
||||
-define(RC_PAYLOAD_FORMAT_INVALID, 16#99).
|
||||
-define(RC_RETAIN_NOT_SUPPORTED, 16#9A).
|
||||
-define(RC_QOS_NOT_SUPPORTED, 16#9B).
|
||||
-define(RC_USE_ANOTHER_SERVER, 16#9C).
|
||||
-define(RC_SERVER_MOVED, 16#9D).
|
||||
-define(RC_SHARED_SUBSCRIPTIONS_NOT_SUPPORTED, 16#9E).
|
||||
-define(RC_CONNECTION_RATE_EXCEEDED, 16#9F).
|
||||
-define(RC_MAXIMUM_CONNECT_TIME, 16#A0).
|
||||
-define(RC_SUBSCRIPTION_IDENTIFIERS_NOT_SUPPORTED, 16#A1).
|
||||
-define(RC_WILDCARD_SUBSCRIPTIONS_NOT_SUPPORTED, 16#A2).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Maximum MQTT Packet ID and Length
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(MAX_PACKET_ID, 16#ffff).
|
||||
-define(MAX_PACKET_SIZE, 16#fffffff).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Frame Mask
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(HIGHBIT, 2#10000000).
|
||||
-define(LOWBITS, 2#01111111).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Packet Fixed Header
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-record(mqtt_packet_header, {
|
||||
type = ?RESERVED,
|
||||
dup = false,
|
||||
qos = ?QOS_0,
|
||||
retain = false
|
||||
}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Packets
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(DEFAULT_SUBOPTS, #{rh => 0, %% Retain Handling
|
||||
rap => 0, %% Retain as Publish
|
||||
nl => 0, %% No Local
|
||||
qos => 0 %% QoS
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_connect, {
|
||||
proto_name = <<"MQTT">>,
|
||||
proto_ver = ?MQTT_PROTO_V4,
|
||||
is_bridge = false,
|
||||
clean_start = true,
|
||||
will_flag = false,
|
||||
will_qos = ?QOS_0,
|
||||
will_retain = false,
|
||||
keepalive = 0,
|
||||
properties = undefined,
|
||||
clientid = <<>>,
|
||||
will_props = undefined,
|
||||
will_topic = undefined,
|
||||
will_payload = undefined,
|
||||
username = undefined,
|
||||
password = undefined
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_connack, {
|
||||
ack_flags,
|
||||
reason_code,
|
||||
properties
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_publish, {
|
||||
topic_name,
|
||||
packet_id,
|
||||
properties
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_puback, {
|
||||
packet_id,
|
||||
reason_code,
|
||||
properties
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_subscribe, {
|
||||
packet_id,
|
||||
properties,
|
||||
topic_filters
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_suback, {
|
||||
packet_id,
|
||||
properties,
|
||||
reason_codes
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_unsubscribe, {
|
||||
packet_id,
|
||||
properties,
|
||||
topic_filters
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_unsuback, {
|
||||
packet_id,
|
||||
properties,
|
||||
reason_codes
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_disconnect, {
|
||||
reason_code,
|
||||
properties
|
||||
}).
|
||||
|
||||
-record(mqtt_packet_auth, {
|
||||
reason_code,
|
||||
properties
|
||||
}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Message
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-record(mqtt_msg, {
|
||||
qos = ?QOS_0 :: emqtt:qos(),
|
||||
retain = false :: boolean(),
|
||||
dup = false :: boolean(),
|
||||
packet_id :: emqtt:packet_id(),
|
||||
topic :: emqtt:topic(),
|
||||
props :: emqtt:properties(),
|
||||
payload :: binary()
|
||||
}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Control Packet
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-record(mqtt_packet, {
|
||||
header :: #mqtt_packet_header{},
|
||||
variable :: #mqtt_packet_connect{}
|
||||
| #mqtt_packet_connack{}
|
||||
| #mqtt_packet_publish{}
|
||||
| #mqtt_packet_puback{}
|
||||
| #mqtt_packet_subscribe{}
|
||||
| #mqtt_packet_suback{}
|
||||
| #mqtt_packet_unsubscribe{}
|
||||
| #mqtt_packet_unsuback{}
|
||||
| #mqtt_packet_disconnect{}
|
||||
| #mqtt_packet_auth{}
|
||||
| pos_integer()
|
||||
| undefined,
|
||||
payload :: binary() | undefined
|
||||
}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% MQTT Packet Match
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-define(CONNECT_PACKET(Var),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?CONNECT},
|
||||
variable = Var}).
|
||||
|
||||
-define(CONNACK_PACKET(ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?CONNACK},
|
||||
variable = #mqtt_packet_connack{ack_flags = 0,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(CONNACK_PACKET(ReasonCode, SessPresent),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?CONNACK},
|
||||
variable = #mqtt_packet_connack{ack_flags = SessPresent,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(CONNACK_PACKET(ReasonCode, SessPresent, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?CONNACK},
|
||||
variable = #mqtt_packet_connack{ack_flags = SessPresent,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(AUTH_PACKET(),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?AUTH},
|
||||
variable = #mqtt_packet_auth{reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(AUTH_PACKET(ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?AUTH},
|
||||
variable = #mqtt_packet_auth{reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(AUTH_PACKET(ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?AUTH},
|
||||
variable = #mqtt_packet_auth{reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(PUBLISH_PACKET(QoS),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH, qos = QoS}}).
|
||||
|
||||
-define(PUBLISH_PACKET(QoS, PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH,
|
||||
qos = QoS},
|
||||
variable = #mqtt_packet_publish{packet_id = PacketId}
|
||||
}).
|
||||
|
||||
-define(PUBLISH_PACKET(QoS, Topic, PacketId, Payload),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH,
|
||||
qos = QoS},
|
||||
variable = #mqtt_packet_publish{topic_name = Topic,
|
||||
packet_id = PacketId},
|
||||
payload = Payload
|
||||
}).
|
||||
|
||||
-define(PUBLISH_PACKET(QoS, Topic, PacketId, Properties, Payload),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBLISH,
|
||||
qos = QoS},
|
||||
variable = #mqtt_packet_publish{topic_name = Topic,
|
||||
packet_id = PacketId,
|
||||
properties = Properties},
|
||||
payload = Payload
|
||||
}).
|
||||
|
||||
-define(PUBACK_PACKET(PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBACK},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(PUBACK_PACKET(PacketId, ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBACK},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(PUBACK_PACKET(PacketId, ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBACK},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(PUBREC_PACKET(PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREC},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(PUBREC_PACKET(PacketId, ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREC},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(PUBREC_PACKET(PacketId, ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREC},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(PUBREL_PACKET(PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREL,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(PUBREL_PACKET(PacketId, ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREL,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(PUBREL_PACKET(PacketId, ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBREL,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(PUBCOMP_PACKET(PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBCOMP},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(PUBCOMP_PACKET(PacketId, ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBCOMP},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(PUBCOMP_PACKET(PacketId, ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?PUBCOMP},
|
||||
variable = #mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(SUBSCRIBE_PACKET(PacketId, TopicFilters),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?SUBSCRIBE,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_subscribe{packet_id = PacketId,
|
||||
topic_filters = TopicFilters}
|
||||
}).
|
||||
|
||||
-define(SUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?SUBSCRIBE,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_subscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters}
|
||||
}).
|
||||
|
||||
-define(SUBACK_PACKET(PacketId, ReasonCodes),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?SUBACK},
|
||||
variable = #mqtt_packet_suback{packet_id = PacketId,
|
||||
reason_codes = ReasonCodes}
|
||||
}).
|
||||
|
||||
-define(SUBACK_PACKET(PacketId, Properties, ReasonCodes),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?SUBACK},
|
||||
variable = #mqtt_packet_suback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes}
|
||||
}).
|
||||
|
||||
-define(UNSUBSCRIBE_PACKET(PacketId, TopicFilters),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?UNSUBSCRIBE,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_unsubscribe{packet_id = PacketId,
|
||||
topic_filters = TopicFilters}
|
||||
}).
|
||||
|
||||
-define(UNSUBSCRIBE_PACKET(PacketId, Properties, TopicFilters),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?UNSUBSCRIBE,
|
||||
qos = ?QOS_1},
|
||||
variable = #mqtt_packet_unsubscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters}
|
||||
}).
|
||||
|
||||
-define(UNSUBACK_PACKET(PacketId),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?UNSUBACK},
|
||||
variable = #mqtt_packet_unsuback{packet_id = PacketId}
|
||||
}).
|
||||
|
||||
-define(UNSUBACK_PACKET(PacketId, ReasonCodes),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?UNSUBACK},
|
||||
variable = #mqtt_packet_unsuback{packet_id = PacketId,
|
||||
reason_codes = ReasonCodes}
|
||||
}).
|
||||
|
||||
-define(UNSUBACK_PACKET(PacketId, Properties, ReasonCodes),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?UNSUBACK},
|
||||
variable = #mqtt_packet_unsuback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes}
|
||||
}).
|
||||
|
||||
-define(DISCONNECT_PACKET(),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?DISCONNECT},
|
||||
variable = #mqtt_packet_disconnect{reason_code = 0}
|
||||
}).
|
||||
|
||||
-define(DISCONNECT_PACKET(ReasonCode),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?DISCONNECT},
|
||||
variable = #mqtt_packet_disconnect{reason_code = ReasonCode}
|
||||
}).
|
||||
|
||||
-define(DISCONNECT_PACKET(ReasonCode, Properties),
|
||||
#mqtt_packet{header = #mqtt_packet_header{type = ?DISCONNECT},
|
||||
variable = #mqtt_packet_disconnect{reason_code = ReasonCode,
|
||||
properties = Properties}
|
||||
}).
|
||||
|
||||
-define(PACKET(Type), #mqtt_packet{header = #mqtt_packet_header{type = Type}}).
|
||||
|
||||
-endif.
|
||||
109
apps/iot/include/iot.hrl
Normal file
109
apps/iot/include/iot.hrl
Normal file
@ -0,0 +1,109 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author licheng5
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Shared constants and record definitions for the iot application.
%%% @end
%%% Created : 14. 2月 2023 19:48
%%%-------------------------------------------------------------------
-author("licheng5").

%% Host online status.
-define(HOST_OFFLINE, 0).
-define(HOST_ONLINE, 1).
-define(HOST_NOT_JOINED, -1).

%% Device online status.
-define(DEVICE_OFFLINE, 0).
-define(DEVICE_ONLINE, 1).
-define(DEVICE_NOT_JOINED, -1).

%% Status of a dispatched (downstream) task.
%% NOTE(review): the original per-value comments read "not joined / offline /
%% online", which looks copy-pasted from the status macros above; the macro
%% names suggest init / failed / ok — confirm the intended meaning.
-define(TASK_STATUS_INIT, -1).
-define(TASK_STATUS_FAILED, 0).
-define(TASK_STATUS_OK, 1).

%% Method identifiers for data reported by the host side.
%% ?METHOD_AUTH establishes the register relationship to the websocket.
-define(METHOD_AUTH, 16#00).
-define(METHOD_CREATE_SESSION, 16#01).

-define(METHOD_DATA, 16#02).
-define(METHOD_PING, 16#03).
-define(METHOD_INFORM, 16#04).
-define(METHOD_FEEDBACK_STEP, 16#05).
-define(METHOD_FEEDBACK_RESULT, 16#06).
-define(METHOD_EVENT, 16#07).
%% AI-recognition event report.
-define(METHOD_AI_EVENT, 16#08).

%% Message body (packet) types.
-define(PACKET_REQUEST, 16#01).
-define(PACKET_RESPONSE, 16#02).
%% Server-side push message.
-define(PACKET_PUBLISH, 16#03).
-define(PACKET_PUBLISH_RESPONSE, 16#04).

%% Event types.
-define(EVENT_DEVICE, 16#01).
%% Host-related events.
-define(EVENT_HOST, 16#02).

%% AI-related events.
-define(EVENT_AI, 16#03).

%% Directive (command) types.
-define(DIRECTIVE_ZD_CTRL, 16#01).

%% Cache database table: key/value entry with optional expiry.
-record(kv, {
    key :: binary(),
    val :: binary() | list() | map() | sets:set(),
    %% Expiry timestamp; presumably 0 means "never expires" — TODO confirm.
    expire_at = 0 :: integer(),
    type :: atom()
}).

%% Id generator: per-table auto-increment counter.
-record(id_generator, {
    tab :: atom(),
    increment_id = 0 :: integer()
}).

%% Statistics item: success/failure counters.
-record(option, {
    success_num = 0,
    fail_num = 0
}).

%% Statistics accumulator, keyed by {scene id, date}.
-record(totalizator, {
    key :: {SceneId :: integer(), Date :: calendar:date()},
    scene_id :: integer(),
    date :: calendar:date(),
    option :: #option{}
}).

%% Northbound data.
-record(north_data, {
    id = 0 :: integer(),
    location_code :: binary(),
    %% Database-typed endpoint; may carry a list: [{K, V}, {K1, V1}]
    fields :: [{K :: binary(), V :: any()}],
    timestamp = 0 :: integer()
}).

%% Event-related data.
-record(event_data, {
    id = 0 :: integer(),
    location_code :: binary(),
    event_type :: integer(),
    params :: map()
}).

%% Outgoing (post) data.
-record(post_data, {
    id = 0 :: integer(),
    location_code :: binary(),
    %% Database-typed endpoint; may carry a list: [{K, V}, {K1, V1}]
    body :: binary() | list()
}).
|
||||
1
apps/iot/priv/jinzhi_pri.key
Normal file
1
apps/iot/priv/jinzhi_pri.key
Normal file
@ -0,0 +1 @@
|
||||
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBALHOer3l1/Op2N9m8SGeoryvumNjcz7yD41YmqTjIEptA20l4k3MIT5R6iCwLeky2QGk/ZHn1es6Z7SCUFk6x4+dFZ40HuT7CeRPpeRo2U/vxPt/FzChClpo79TclCvJBemnOJ8bC0z/Afm/kfs3LSYNbNIA6qy+IitifIKg2DfpAgMBAAECgYAz0+rlNXz4Encbz2bUFOh8tYBP/ioWm/o6iiwxid7cst//zb4kTS8XeksTkicfxWmJ2CztfbVWJqUZ8a44BDEsxrbLwVvuAPNdUChyoOkT0LeYEaeVaV35m6Hv3EkCeTUne8GQA8Z4Fx4ndpO9YkttQuu/8UQZ0FM73wrNkN0zrQJBAPcDeO61ZgnC6jlbrHj82224g9AXT2UBYzP14TaWWElbF9y3lxMrQ+f/KYzDaE3BR2UZdihv601lze0MsxeCzR8CQQC4RnT6ekvAi9CCktCVV1HJ5kpzpqejNFTs9x4WJYKG14CwbMyDIaKobB/N4Ylv0qliPPDPs4V3DAuFZtnEEtH3AkEAioCE73wBAdor0QuJErHdK5F5P1XCq8TyZfEpXZ1BVahhId5DNHle8xeMqaPruSV1rcdwDE5s5pH9vDwRs04hSwJBAJ8QmotYI6maRqtfhdNTo5MPSbcY5V24n5JJIdxmFozE2x3vXH3Y++o8Ixv5kkRHaNUW25u+T/faGtvVUyawRDMCQQD1ApVjihrgogCGyk00shzBcEzA7ZUGZrI6Fwjf5oanbR2SLUUfnbGWnvdURV6Luq6YsIiFzCL69rjY5aB7EqEp
|
||||
285
apps/iot/src/consumer/iot_zd_consumer.erl
Normal file
285
apps/iot/src/consumer/iot_zd_consumer.erl
Normal file
@ -0,0 +1,285 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%% 1. 需要考虑集群部署的相关问题,上行的数据可能在集群中共享
|
||||
%%% 2. host进程不能直接去监听topic,这样涉及到新增和下线的很多问题
|
||||
%%% @end
|
||||
%%% Created : 12. 3月 2023 21:27
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_zd_consumer).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([start_link/0]).
|
||||
-export([mock/5]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
|
||||
|
||||
-define(SERVER, ?MODULE).
|
||||
-define(RETRY_INTERVAL, 5000).
|
||||
|
||||
%% 执行超时时间
|
||||
-define(EXECUTE_TIMEOUT, 10 * 1000).
|
||||
|
||||
%% 需要订阅的主题信息
|
||||
-define(Topics,[
|
||||
{<<"CET/NX/download">>, 2}
|
||||
]).
|
||||
|
||||
-record(state, {
|
||||
conn_pid :: undefined | pid(),
|
||||
logger_pid :: pid(),
|
||||
mqtt_props :: list(),
|
||||
%% 执行中的任务数
|
||||
flight_num = 0
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc Build a mock "ctrl" directive for the given location code and hand it
%% to the consumer process via a synchronous call (intended for manual tests
%% from the shell). The directive result arrives later as a
%% {directive_reply, _} info message handled by this server.
mock(LocationCode, Para, SType, CType, Value) when is_binary(LocationCode), is_integer(SType), is_integer(CType), is_integer(Para) ->
    Properties = #{
        <<"type">> => <<"ctrl">>,
        <<"para">> => Para,
        <<"stype">> => SType,
        <<"ctype">> => CType,
        <<"value">> => Value,
        <<"timestamp">> => iot_util:current_time()
    },
    Request = #{
        <<"version">> => <<"1.0">>,
        <<"ts">> => iot_util:current_time(),
        <<"properties">> => Properties,
        <<"location_code">> => LocationCode
    },
    gen_server:call(?MODULE, {mock, Request}).
|
||||
|
||||
%% @doc Spawns the server and registers the local name (unique).
%% Called by the supervisor; there is exactly one iot_zd_consumer per node.
-spec(start_link() ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_server callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc Initializes the server.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([]) ->
    %% Trap exits so crashes of the linked emqtt connection and the logger
    %% arrive as {'EXIT', Pid, Reason} messages (handled in handle_info/2)
    %% instead of killing this server.
    erlang:process_flag(trap_exit, true),

    %% MQTT settings for the "zhongdian" upstream broker.
    {ok, Props} = application:get_env(iot, zhongdian),
    %% Create the forwarder lazily via a 0 ms timer so init/1 returns at once
    %% and connecting to the broker cannot block process creation.
    erlang:start_timer(0, self(), create_consumer),
    %% Start the directive-data logger (linked; restarted in handle_info/2 on EXIT).
    {ok, LoggerPid} = iot_logger:start_link("zd_directive_data"),

    {ok, #state{mqtt_props = Props, conn_pid = undefined, logger_pid = LoggerPid}}.
|
||||
|
||||
%% @private
%% @doc Handling call messages.
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
%% Inject a mock directive as if it had arrived from the broker. The
%% asynchronous outcome comes back as a {directive_reply, _} info message,
%% so the in-flight counter is incremented here and decremented there.
handle_call({mock, Request}, _From, State = #state{conn_pid = ConnPid, flight_num = FlightNum}) when is_pid(ConnPid) ->
    publish_directive(Request, jiffy:encode(Request, [force_utf8])),
    {reply, ok, State#state{flight_num = FlightNum + 1}};
%% BUG FIX: the original had no clause for conn_pid = undefined, so calling
%% mock/5 while (re)connecting crashed the server with function_clause.
handle_call({mock, _Request}, _From, State = #state{conn_pid = undefined}) ->
    {reply, {error, not_connected}, State};
%% Catch-all so unexpected calls cannot crash the consumer.
handle_call(Request, _From, State = #state{}) ->
    lager:notice("[iot_zd_consumer] get a unknown call: ~p", [Request]),
    {reply, {error, unknown_call}, State}.
|
||||
|
||||
%% @private
%% @doc Handling cast messages.
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
%% This server uses no cast messages; ignore anything received.
handle_cast(_Request, State = #state{}) ->
    {noreply, State}.
|
||||
|
||||
%% @private
%% @doc Handling all non call/cast messages.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
%% Broker-initiated DISCONNECT: stop and let the supervisor restart us.
handle_info({disconnect, ReasonCode, Properties}, State) ->
    lager:debug("[iot_zd_consumer] Recv a DISONNECT packet - ReasonCode: ~p, Properties: ~p", [ReasonCode, Properties]),
    {stop, disconnected, State};
%% Messages must be dispatched quickly; JSON deserialization of the payload
%% body is performed in the host process.
handle_info({publish, #{packet_id := _PacketId, payload := Payload, qos := 2, topic := Topic}}, State = #state{flight_num = FlightNum}) ->
    lager:debug("[iot_zd_consumer] Recv a topic: ~p, publish packet: ~ts, qos: 2", [Topic, Payload]),

    %% BUG FIX: `X = catch Expr` is a syntax error in Erlang — `catch` on the
    %% right-hand side of a match must be parenthesized. On a decode failure
    %% the resulting {'EXIT', _} term falls into the catch-all clause of
    %% publish_directive/2, which reports "unknown directive".
    Request = (catch jiffy:decode(Payload, [return_maps])),
    publish_directive(Request, Payload),

    {noreply, State#state{flight_num = FlightNum + 1}};

%% Any QoS other than 2 is unexpected for the subscribed topics; log and drop.
handle_info({publish, #{packet_id := _PacketId, payload := Payload, qos := Qos, topic := Topic}}, State) ->
    lager:notice("[iot_zd_consumer] Recv a topic: ~p, publish packet: ~ts, qos: ~p, qos is error", [Topic, Payload, Qos]),
    {noreply, State};

handle_info({puback, Packet = #{packet_id := _PacketId}}, State = #state{}) ->
    lager:debug("[iot_zd_consumer] receive puback packet: ~p", [Packet]),
    {noreply, State};

%% Deferred/retried connection setup (armed in init/1 and on every failure).
handle_info({timeout, _, create_consumer}, State = #state{mqtt_props = Props, conn_pid = undefined}) ->
    try
        {ok, ConnPid} = create_consumer(Props),
        {noreply, State#state{conn_pid = ConnPid}}
    catch _:Error:Stack ->
        lager:warning("[iot_zd_consumer] config: ~p, create consumer get error: ~p, stack: ~p", [Props, Error, Stack]),
        erlang:start_timer(?RETRY_INTERVAL, self(), create_consumer),
        {noreply, State#state{conn_pid = undefined}}
    end;

%% The connection process died; schedule creation of a new one.
handle_info({'EXIT', ConnPid, Reason}, State = #state{conn_pid = ConnPid}) ->
    lager:warning("[iot_zd_consumer] consumer exited with reason: ~p", [Reason]),
    erlang:start_timer(?RETRY_INTERVAL, self(), create_consumer),

    {noreply, State#state{conn_pid = undefined}};

%% The logger died; start a replacement immediately.
handle_info({'EXIT', LoggerPid, Reason}, State = #state{logger_pid = LoggerPid}) ->
    lager:warning("[iot_zd_consumer] logger exited with reason: ~p", [Reason]),
    %% BUG FIX: the restarted logger has a NEW pid; the original matched the
    %% result against the already-bound LoggerPid, which always badmatches.
    %% Bind a fresh variable and store it in the state instead.
    {ok, NewLoggerPid} = iot_logger:start_link("zd_directive_data"),

    {noreply, State#state{logger_pid = NewLoggerPid}};

%% Outcome of an earlier publish_directive/2: append it to the directive log
%% and decrement the in-flight counter (incremented when the directive was
%% accepted in handle_call/handle_info).
handle_info({directive_reply, Reply}, State = #state{logger_pid = LoggerPid, flight_num = FlightNum}) ->
    FlightInfo = <<"flight_num: ", (integer_to_binary(FlightNum - 1))/binary>>,
    case Reply of
        {ok, RawReq, DirectiveResult} ->
            case DirectiveResult of
                ok ->
                    iot_logger:write(LoggerPid, [<<"[success]">>, RawReq, <<"OK">>, FlightInfo]);
                {ok, Response} when is_binary(Response) ->
                    iot_logger:write(LoggerPid, [<<"[success]">>, RawReq, Response, FlightInfo]);
                {error, Reason0} ->
                    %% Normalise the failure reason to a binary for the log line.
                    Reason = if
                                 is_atom(Reason0) -> atom_to_binary(Reason0);
                                 is_binary(Reason0) -> Reason0;
                                 true -> <<"Unknow error">>
                             end,
                    iot_logger:write(LoggerPid, [<<"[error]">>, RawReq, Reason, FlightInfo])
            end;
        {error, RawReq, Error} when is_binary(Error) ->
            iot_logger:write(LoggerPid, [<<"[error]">>, RawReq, Error, FlightInfo])
    end,
    {noreply, State#state{flight_num = FlightNum - 1}};

handle_info(Info, State = #state{}) ->
    lager:notice("[iot_zd_consumer] get a unknown info: ~p", [Info]),
    {noreply, State}.
|
||||
|
||||
%% @private
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(Reason, _State = #state{conn_pid = ConnPid}) when is_pid(ConnPid) ->
    %% BUG FIX (robustness): the connection process may already be dead when
    %% we terminate (e.g. stopping with reason `disconnected`), in which case
    %% emqtt:unsubscribe/disconnect would exit and mask the real stop reason.
    %% Cleanup is best-effort and therefore guarded.
    try
        %% Cancel the topic subscriptions before disconnecting.
        TopicNames = lists:map(fun({Name, _}) -> Name end, ?Topics),
        {ok, _Props, _ReasonCode} = emqtt:unsubscribe(ConnPid, #{}, TopicNames),

        ok = emqtt:disconnect(ConnPid)
    catch _:CleanupError ->
        lager:warning("[iot_zd_consumer] cleanup failed on terminate: ~p", [CleanupError])
    end,
    lager:debug("[iot_zd_consumer] terminate with reason: ~p", [Reason]),
    ok;
terminate(Reason, _State) ->
    lager:debug("[iot_zd_consumer] terminate with reason: ~p", [Reason]),
    ok.
|
||||
|
||||
%% @private
%% @doc Convert process state when code is changed.
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
%% No state migration is needed between code versions.
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc Resolve the directive's target host/device from its location code
%% (looked up in redis) and hand the directive to the host process. The
%% outcome is always reported back to the calling process as a
%% {directive_reply, ...} message; the blocking publish itself runs in a
%% spawned worker so the consumer process is never held up.
publish_directive(#{<<"version">> := Version, <<"location_code">> := LocationCode, <<"properties">> := DirectiveParams}, RawReq) ->
    Self = self(),
    Reply = fun(Msg) -> Self ! {directive_reply, Msg} end,
    case redis_client:hgetall(LocationCode) of
        {ok, #{<<"host_uuid">> := HostUUID, <<"device_uuid">> := DeviceUUID}} ->
            case iot_host:get_pid(HostUUID) of
                undefined ->
                    Reply({error, RawReq, <<"host uuid: ", HostUUID/binary, " not found">>});
                Pid ->
                    %% Execute in a worker; the host call may take up to
                    %% ?EXECUTE_TIMEOUT ms.
                    spawn(fun() ->
                        DirectiveResult = iot_host:publish_directive(Pid, DeviceUUID, ?DIRECTIVE_ZD_CTRL, Version, DirectiveParams, ?EXECUTE_TIMEOUT),
                        lager:debug("[iot_zd_consumer] get publish_directive result: ~p", [DirectiveResult]),
                        Reply({ok, RawReq, DirectiveResult})
                    end)
            end;
        {ok, Map} when is_map(Map) ->
            %% Redis answered, but without the expected host/device fields.
            RedisData = iolist_to_binary(jiffy:encode(Map, [force_utf8])),
            Reply({error, RawReq, <<"invalid redis data: ", RedisData/binary>>});
        _ ->
            Reply({error, RawReq, <<"location_code: ", LocationCode/binary, " not found in redis">>})
    end;
%% Anything that is not a well-formed directive map (including jiffy decode
%% failures) ends up here.
publish_directive(Other, RawReq) ->
    lager:warning("[iot_zd_consumer] get a error message: ~p", [Other]),
    self() ! {directive_reply, {error, RawReq, <<"unknown directive">>}}.
|
||||
|
||||
%% Open an MQTT v5 connection to the emqx broker described by Props
%% (host/port/username/password/keepalive) and subscribe to ?Topics.
%% The connection process is owned by the caller (owner = self()), so its
%% death is delivered as an 'EXIT' message handled in handle_info/2.
-spec create_consumer(Props :: list()) -> {ok, ConnPid :: pid()} | {error, Reason :: any()}.
create_consumer(Props) when is_list(Props) ->
    %% Node-scoped client id, to avoid clashes in a clustered deployment.
    Node = atom_to_binary(node()),
    ClientId = <<"mqtt-client-", Node/binary, "-zhongdian_mqtt_consumer">>,

    %% Connection settings for the emqx server.
    Host = proplists:get_value(host, Props),
    %% NOTE(review): 18080 is an unusual default for MQTT (1883 is customary)
    %% — confirm against the deployment config.
    Port = proplists:get_value(port, Props, 18080),
    Username = proplists:get_value(username, Props),
    Password = proplists:get_value(password, Props),
    %% NOTE(review): the MQTT keepalive field is 16-bit (max 65535 s); the
    %% 86400 default exceeds that — confirm how emqtt treats it.
    Keepalive = proplists:get_value(keepalive, Props, 86400),

    Opts = [
        {clientid, ClientId},
        {host, Host},
        {port, Port},
        {owner, self()},
        {tcp_opts, []},
        {username, Username},
        {password, Password},
        {keepalive, Keepalive},
        {auto_ack, true},
        {connect_timeout, 5000},
        {proto_ver, v5},
        {retry_interval, 5000}
    ],

    %% Establish the connection to the emqx server.
    lager:debug("[iot_zd_consumer] opts is: ~p", [Opts]),
    case emqtt:start_link(Opts) of
        {ok, ConnPid} ->
            %% Subscribe to all host-related topics.
            lager:debug("[iot_zd_consumer] start conntecting, pid: ~p", [ConnPid]),
            {ok, _} = emqtt:connect(ConnPid),
            lager:debug("[iot_zd_consumer] connect success, pid: ~p", [ConnPid]),
            SubscribeResult = emqtt:subscribe(ConnPid, ?Topics),
            lager:debug("[iot_zd_consumer] subscribe topics: ~p, result is: ~p", [?Topics, SubscribeResult]),

            {ok, ConnPid};
        ignore ->
            {error, ignore};
        {error, Reason} ->
            {error, Reason}
    end.
|
||||
30
apps/iot/src/database/ai_event_logs_bo.erl
Normal file
30
apps/iot/src/database/ai_event_logs_bo.erl
Normal file
@ -0,0 +1,30 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Persistence helper for the `ai_event_logs` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(ai_event_logs_bo).
-author("aresei").
-include("iot.hrl").

-export([insert/6]).

%% API

%% @doc Insert one AI event-log row; created_at is stamped with the current
%% local time.
-spec insert(HostUUID :: binary(), DeviceUUID :: binary(), SceneId :: integer(), MicroId :: integer(), EventType :: integer(), Content :: binary()) ->
    ok | {ok, InsertId :: integer()} | {error, Reason :: any()}.
insert(HostUUID, DeviceUUID, SceneId, MicroId, EventType, Content)
    when is_integer(EventType), is_binary(HostUUID), is_binary(DeviceUUID), is_integer(SceneId), is_integer(MicroId), is_binary(Content) ->

    Row = #{
        <<"event_type">> => EventType,
        <<"host_uuid">> => HostUUID,
        <<"device_uuid">> => DeviceUUID,
        <<"scene_id">> => SceneId,
        <<"micro_id">> => MicroId,
        <<"content">> => Content,
        <<"created_at">> => calendar:local_time()
    },
    mysql_pool:insert(mysql_iot, <<"ai_event_logs">>, Row, true).
|
||||
54
apps/iot/src/database/device_bo.erl
Normal file
54
apps/iot/src/database/device_bo.erl
Normal file
@ -0,0 +1,54 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Database access helpers for the `device` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(device_bo).
-author("aresei").
-include("iot.hrl").

%% API
-export([get_all_devices/0, get_host_devices/1, get_device_by_uuid/1, change_status/2]).

%% @doc Fetch every device row that has a non-empty device_uuid.
-spec get_all_devices() -> {ok, Devices :: [map()]} | {error, Reason :: any()}.
get_all_devices() ->
    mysql_pool:get_all(mysql_iot, <<"SELECT * FROM device WHERE device_uuid != ''">>).

%% @doc List the UUIDs of the devices attached to the given host.
-spec get_host_devices(HostId :: integer()) -> {ok, Devices :: [binary()]} | {error, Reason::any()}.
get_host_devices(HostId) when is_integer(HostId) ->
    case mysql_pool:get_all(mysql_iot, <<"SELECT device_uuid FROM device WHERE host_id = ? AND device_uuid != ''">>, [HostId]) of
        {ok, Devices} ->
            {ok, lists:map(fun(#{<<"device_uuid">> := DeviceUUID}) -> DeviceUUID end, Devices)};
        {error, Reason} ->
            {error, Reason}
    end.

%% @doc Fetch one device row by its UUID.
-spec get_device_by_uuid(DeviceUUID :: binary()) -> {ok, DeviceInfo :: map()} | undefined.
get_device_by_uuid(DeviceUUID) when is_binary(DeviceUUID) ->
    mysql_pool:get_row(mysql_iot, <<"SELECT * FROM device WHERE device_uuid = ? LIMIT 1">>, [DeviceUUID]).

%% @doc Change the device status. On success an ?EVENT_DEVICE row is written
%% to the event log; a failed event-log insert is deliberately ignored
%% (best-effort logging must not fail the status change).
-spec change_status(DeviceUUID :: binary(), Status :: integer()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
change_status(DeviceUUID, NStatus) when is_binary(DeviceUUID), is_integer(NStatus) ->
    case change_status0(DeviceUUID, NStatus) of
        Result = {ok, _} ->
            event_logs_bo:insert(?EVENT_DEVICE, DeviceUUID, NStatus),
            Result;
        Error ->
            Error
    end.

%% Going online: the first transition from ?DEVICE_NOT_JOINED (-1) also
%% stamps access_at (first-access/activation time).
change_status0(DeviceUUID, ?DEVICE_ONLINE) when is_binary(DeviceUUID) ->
    Timestamp = calendar:local_time(),
    case mysql_pool:get_row(mysql_iot, <<"SELECT status FROM device WHERE device_uuid = ? LIMIT 1">>, [DeviceUUID]) of
        {ok, #{<<"status">> := -1}} ->
            mysql_pool:update_by(mysql_iot, <<"UPDATE device SET status = ?, access_at = ?, updated_at = ? WHERE device_uuid = ? LIMIT 1">>, [?DEVICE_ONLINE, Timestamp, Timestamp, DeviceUUID]);
        {ok, _} ->
            mysql_pool:update_by(mysql_iot, <<"UPDATE device SET status = ?, updated_at = ? WHERE device_uuid = ? LIMIT 1">>, [?DEVICE_ONLINE, Timestamp, DeviceUUID]);
        undefined ->
            {error, <<"device not found">>}
    end;
%% Going offline. BUG FIX: also bump updated_at, for consistency with the
%% online path (the original offline update left updated_at stale).
change_status0(DeviceUUID, ?DEVICE_OFFLINE) when is_binary(DeviceUUID) ->
    mysql_pool:update_by(mysql_iot, <<"UPDATE device SET status = ?, updated_at = ? WHERE device_uuid = ? LIMIT 1">>, [?DEVICE_OFFLINE, calendar:local_time(), DeviceUUID]).
|
||||
25
apps/iot/src/database/event_logs_bo.erl
Normal file
25
apps/iot/src/database/event_logs_bo.erl
Normal file
@ -0,0 +1,25 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Persistence helper for the `event_logs` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(event_logs_bo).
-author("aresei").
-include("iot.hrl").

-export([insert/3]).

%% API

%% @doc Insert one event-log row (?EVENT_DEVICE / ?EVENT_HOST / ?EVENT_AI);
%% created_at is stamped with the current local time.
-spec insert(EventType :: integer(), AssocUUID :: binary(), Status :: integer()) ->
    {ok, InsertId :: integer()} | {error, Reason :: any()}.
insert(EventType, AssocUUID, Status) when is_integer(EventType), is_binary(AssocUUID), is_integer(Status) ->
    Row = #{
        <<"event_type">> => EventType,
        <<"assoc_uuid">> => AssocUUID,
        <<"status">> => Status,
        <<"created_at">> => calendar:local_time()
    },
    mysql_pool:insert(mysql_iot, <<"event_logs">>, Row, true).
|
||||
55
apps/iot/src/database/host_bo.erl
Normal file
55
apps/iot/src/database/host_bo.erl
Normal file
@ -0,0 +1,55 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Database access helpers for the `host` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(host_bo).
-author("aresei").
-include("iot.hrl").

%% API
-export([get_all_hosts/0, change_status/2, get_host_by_uuid/1, get_host_by_id/1]).

%% @doc List the UUIDs of all hosts with a non-empty uuid. Query errors are
%% deliberately collapsed into an empty list (best-effort enumeration).
-spec get_all_hosts() -> UUIDList :: [binary()].
get_all_hosts() ->
    case mysql_pool:get_all(mysql_iot, <<"SELECT uuid FROM host where uuid != ''">>) of
        {ok, Hosts} ->
            lists:map(fun(#{<<"uuid">> := UUID}) -> UUID end, Hosts);
        {error, _} ->
            []
    end.

%% @doc Fetch one host row by its UUID.
-spec get_host_by_uuid(UUID :: binary()) -> undefined | {ok, HostInfo :: map()}.
get_host_by_uuid(UUID) when is_binary(UUID) ->
    mysql_pool:get_row(mysql_iot, <<"SELECT * FROM host WHERE uuid = ? LIMIT 1">>, [UUID]).

%% @doc Fetch one host row by its numeric id.
-spec get_host_by_id(HostId :: integer()) -> undefined | {ok, HostInfo :: map()}.
get_host_by_id(HostId) when is_integer(HostId) ->
    mysql_pool:get_row(mysql_iot, <<"SELECT * FROM host WHERE id = ? LIMIT 1">>, [HostId]).

%% @doc Change the host status. On success an ?EVENT_HOST row is written to
%% the event log; a failed event-log insert is deliberately ignored
%% (best-effort logging must not fail the status change).
-spec change_status(UUID :: binary(), Status :: integer()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
change_status(UUID, NStatus) when is_binary(UUID), is_integer(NStatus) ->
    case change_status0(UUID, NStatus) of
        Result = {ok, _} ->
            event_logs_bo:insert(?EVENT_HOST, UUID, NStatus),
            Result;
        Error ->
            Error
    end.

%% Going online: the first transition from ?HOST_NOT_JOINED (-1) also stamps
%% access_at (first-access/activation time).
change_status0(UUID, ?HOST_ONLINE) when is_binary(UUID) ->
    Timestamp = calendar:local_time(),
    case mysql_pool:get_row(mysql_iot, <<"SELECT status FROM host WHERE uuid = ? LIMIT 1">>, [UUID]) of
        %% First activation.
        {ok, #{<<"status">> := -1}} ->
            mysql_pool:update_by(mysql_iot, <<"UPDATE host SET status = ?, access_at = ?, updated_at = ? WHERE uuid = ? LIMIT 1">>, [?HOST_ONLINE, Timestamp, Timestamp, UUID]);
        {ok, _} ->
            mysql_pool:update_by(mysql_iot, <<"UPDATE host SET status = ?, updated_at = ? WHERE uuid = ? LIMIT 1">>, [?HOST_ONLINE, Timestamp, UUID]);
        undefined ->
            {error, <<"host not found">>}
    end;
%% Going offline. BUG FIX: also bump updated_at, for consistency with the
%% online path (the original offline update left updated_at stale).
change_status0(UUID, ?HOST_OFFLINE) when is_binary(UUID) ->
    mysql_pool:update_by(mysql_iot, <<"UPDATE host SET status = ?, updated_at = ? WHERE uuid = ? LIMIT 1">>, [?HOST_OFFLINE, calendar:local_time(), UUID]).
|
||||
17
apps/iot/src/database/micro_inform_log.erl
Normal file
17
apps/iot/src/database/micro_inform_log.erl
Normal file
@ -0,0 +1,17 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Persistence helper for the `micro_inform_log` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(micro_inform_log).
-author("aresei").
-include("iot.hrl").

%% API
-export([insert/1]).

%% @doc Insert one row built from the given column map.
insert(Row) when is_map(Row) ->
    mysql_pool:insert(mysql_iot, <<"micro_inform_log">>, Row, true).
|
||||
19
apps/iot/src/database/micro_set_bo.erl
Normal file
19
apps/iot/src/database/micro_set_bo.erl
Normal file
@ -0,0 +1,19 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Database access helpers for the `micro_set` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(micro_set_bo).
-author("aresei").
-include("iot.hrl").

%% API
-export([change_status/4]).

%% @doc Update the status of the micro_set row identified by host, scene and
%% micro id.
-spec change_status(HostId :: integer(), SceneId :: integer(), MicroId :: integer(), Status :: integer()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
change_status(HostId, SceneId, MicroId, Status) when is_integer(HostId), is_integer(SceneId), is_integer(MicroId), is_integer(Status) ->
    Sql = <<"UPDATE micro_set SET status = ? WHERE host_id = ? AND scene_id = ? AND micro_id = ? LIMIT 1">>,
    mysql_pool:update_by(mysql_iot, Sql, [Status, HostId, SceneId, MicroId]).
|
||||
17
apps/iot/src/database/scene_feedback.erl
Normal file
17
apps/iot/src/database/scene_feedback.erl
Normal file
@ -0,0 +1,17 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Persistence helper for the `scene_feedback` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(scene_feedback).
-author("aresei").
-include("iot.hrl").

%% API
-export([insert/1]).

%% @doc Insert one row built from the given column map.
insert(Row) when is_map(Row) ->
    mysql_pool:insert(mysql_iot, <<"scene_feedback">>, Row, true).
|
||||
17
apps/iot/src/database/scene_feedback_step.erl
Normal file
17
apps/iot/src/database/scene_feedback_step.erl
Normal file
@ -0,0 +1,17 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Persistence helper for the `scene_feedback_step` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(scene_feedback_step).
-author("aresei").
-include("iot.hrl").

%% API
-export([insert/1]).

%% @doc Insert one row built from the given column map.
insert(Row) when is_map(Row) ->
    mysql_pool:insert(mysql_iot, <<"scene_feedback_step">>, Row, true).
|
||||
19
apps/iot/src/database/task_logs_bo.erl
Normal file
19
apps/iot/src/database/task_logs_bo.erl
Normal file
@ -0,0 +1,19 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%%   Database access helpers for the `task_logs` table.
%%% @end
%%% Created : 16. 5月 2023 12:48
%%%-------------------------------------------------------------------
-module(task_logs_bo).
-author("aresei").
-include("iot.hrl").

%% API
-export([change_status/2]).

%% @doc Update the status of one dispatched task (see ?TASK_STATUS_* in iot.hrl).
-spec change_status(TaskId :: integer(), Status :: integer()) -> {ok, AffectedRow :: integer()} | {error, Reason :: any()}.
change_status(TaskId, Status) when is_integer(TaskId), is_integer(Status) ->
    Sql = <<"UPDATE task_logs SET status = ? WHERE id = ? LIMIT 1">>,
    mysql_pool:update_by(mysql_iot, Sql, [Status, TaskId]).
|
||||
1319
apps/iot/src/emqtt/emqtt.erl
Normal file
1319
apps/iot/src/emqtt/emqtt.erl
Normal file
File diff suppressed because it is too large
Load Diff
738
apps/iot/src/emqtt/emqtt_frame.erl
Normal file
738
apps/iot/src/emqtt/emqtt_frame.erl
Normal file
@ -0,0 +1,738 @@
|
||||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqtt_frame).
|
||||
|
||||
-include("emqtt.hrl").
|
||||
|
||||
-export([initial_parse_state/0, initial_parse_state/1]).
|
||||
|
||||
-export([parse/1, parse/2, serialize_fun/0, serialize_fun/1, serialize/1, serialize/2 ]).
|
||||
|
||||
-export_type([options/0, parse_state/0, parse_result/0, serialize_fun/0]).
|
||||
|
||||
-type(version() :: ?MQTT_PROTO_V3
|
||||
| ?MQTT_PROTO_V4
|
||||
| ?MQTT_PROTO_V5).
|
||||
|
||||
-type(options() :: #{strict_mode => boolean(),
|
||||
max_size => 1..?MAX_PACKET_SIZE,
|
||||
version => version()}).
|
||||
|
||||
-opaque(parse_state() :: {none, options()} | cont_fun()).
|
||||
|
||||
-opaque(parse_result() :: {more, cont_fun()}
|
||||
| {ok, #mqtt_packet{}, binary(), parse_state()}).
|
||||
|
||||
-type(cont_fun() :: fun((binary()) -> parse_result())).
|
||||
|
||||
-type(serialize_fun() :: fun((emqx_types:packet()) -> iodata())).
|
||||
|
||||
-define(none(Options), {none, Options}).
|
||||
|
||||
-define(DEFAULT_OPTIONS,
|
||||
#{strict_mode => false,
|
||||
max_size => ?MAX_PACKET_SIZE,
|
||||
version => ?MQTT_PROTO_V4
|
||||
}).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Init Parse State
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-spec(initial_parse_state() -> {none, options()}).
|
||||
initial_parse_state() ->
|
||||
initial_parse_state(#{}).
|
||||
|
||||
-spec(initial_parse_state(options()) -> {none, options()}).
|
||||
initial_parse_state(Options) when is_map(Options) ->
|
||||
?none(merge_opts(Options)).
|
||||
|
||||
%% @pivate
|
||||
merge_opts(Options) ->
|
||||
maps:merge(?DEFAULT_OPTIONS, Options).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Parse MQTT Frame
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-spec(parse(binary()) -> parse_result()).
|
||||
parse(Bin) ->
|
||||
parse(Bin, initial_parse_state()).
|
||||
|
||||
-spec(parse(binary(), parse_state()) -> parse_result()).
|
||||
parse(<<>>, {none, Options}) ->
|
||||
{more, fun(Bin) -> parse(Bin, {none, Options}) end};
|
||||
parse(<<Type:4, Dup:1, QoS:2, Retain:1, Rest/binary>>,
|
||||
{none, Options = #{strict_mode := StrictMode}}) ->
|
||||
%% Validate header if strict mode.
|
||||
StrictMode andalso validate_header(Type, Dup, QoS, Retain),
|
||||
Header = #mqtt_packet_header{type = Type,
|
||||
dup = bool(Dup),
|
||||
qos = QoS,
|
||||
retain = bool(Retain)
|
||||
},
|
||||
Header1 = case fixqos(Type, QoS) of
|
||||
QoS -> Header;
|
||||
FixedQoS -> Header#mqtt_packet_header{qos = FixedQoS}
|
||||
end,
|
||||
parse_remaining_len(Rest, Header1, Options);
|
||||
parse(Bin, Cont) when is_binary(Bin), is_function(Cont) ->
|
||||
Cont(Bin).
|
||||
|
||||
parse_remaining_len(<<>>, Header, Options) ->
|
||||
{more, fun(Bin) -> parse_remaining_len(Bin, Header, Options) end};
|
||||
parse_remaining_len(Rest, Header, Options) ->
|
||||
parse_remaining_len(Rest, Header, 1, 0, Options).
|
||||
|
||||
parse_remaining_len(_Bin, _Header, _Multiplier, Length, #{max_size := MaxSize}) when Length > MaxSize ->
|
||||
error(frame_too_large);
|
||||
parse_remaining_len(<<>>, Header, Multiplier, Length, Options) ->
|
||||
{more, fun(Bin) -> parse_remaining_len(Bin, Header, Multiplier, Length, Options) end};
|
||||
%% Match DISCONNECT without payload
|
||||
parse_remaining_len(<<0:8, Rest/binary>>, Header = #mqtt_packet_header{type = ?DISCONNECT}, 1, 0, Options) ->
|
||||
Packet = packet(Header, #mqtt_packet_disconnect{reason_code = ?RC_SUCCESS}),
|
||||
{ok, Packet, Rest, ?none(Options)};
|
||||
%% Match PINGREQ.
|
||||
parse_remaining_len(<<0:8, Rest/binary>>, Header, 1, 0, Options) ->
|
||||
parse_frame(Rest, Header, 0, Options);
|
||||
%% Match PUBACK, PUBREC, PUBREL, PUBCOMP, UNSUBACK...
|
||||
parse_remaining_len(<<0:1, 2:7, Rest/binary>>, Header, 1, 0, Options) ->
|
||||
parse_frame(Rest, Header, 2, Options);
|
||||
parse_remaining_len(<<1:1, Len:7, Rest/binary>>, Header, Multiplier, Value, Options) ->
|
||||
parse_remaining_len(Rest, Header, Multiplier * ?HIGHBIT, Value + Len * Multiplier, Options);
|
||||
parse_remaining_len(<<0:1, Len:7, Rest/binary>>, Header, Multiplier, Value,
|
||||
Options = #{max_size := MaxSize}) ->
|
||||
FrameLen = Value + Len * Multiplier,
|
||||
if
|
||||
FrameLen > MaxSize ->
|
||||
error(frame_too_large);
|
||||
true ->
|
||||
parse_frame(Rest, Header, FrameLen, Options)
|
||||
end.
|
||||
|
||||
parse_frame(Bin, Header, 0, Options) ->
|
||||
{ok, packet(Header), Bin, ?none(Options)};
|
||||
|
||||
parse_frame(Bin, Header, Length, Options) ->
|
||||
case Bin of
|
||||
<<FrameBin:Length/binary, Rest/binary>> ->
|
||||
case parse_packet(Header, FrameBin, Options) of
|
||||
{Variable, Payload} ->
|
||||
{ok, packet(Header, Variable, Payload), Rest, ?none(Options)};
|
||||
Variable = #mqtt_packet_connect{proto_ver = Ver} ->
|
||||
{ok, packet(Header, Variable), Rest, ?none(Options#{version := Ver})};
|
||||
Variable ->
|
||||
{ok, packet(Header, Variable), Rest, ?none(Options)}
|
||||
end;
|
||||
TooShortBin ->
|
||||
{more, fun(BinMore) ->
|
||||
parse_frame(<<TooShortBin/binary, BinMore/binary>>, Header, Length, Options)
|
||||
end}
|
||||
end.
|
||||
|
||||
-compile({inline, [packet/1, packet/2, packet/3]}).
|
||||
packet(Header) ->
|
||||
#mqtt_packet{header = Header}.
|
||||
packet(Header, Variable) ->
|
||||
#mqtt_packet{header = Header, variable = Variable}.
|
||||
packet(Header, Variable, Payload) ->
|
||||
#mqtt_packet{header = Header, variable = Variable, payload = Payload}.
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?CONNECT}, FrameBin, _Options) ->
|
||||
{ProtoName, Rest} = parse_utf8_string(FrameBin),
|
||||
<<BridgeTag:4, ProtoVer:4, Rest1/binary>> = Rest,
|
||||
% Note: Crash when reserved flag doesn't equal to 0, there is no strict
|
||||
% compliance with the MQTT5.0.
|
||||
<<UsernameFlag : 1,
|
||||
PasswordFlag : 1,
|
||||
WillRetain : 1,
|
||||
WillQoS : 2,
|
||||
WillFlag : 1,
|
||||
CleanStart : 1,
|
||||
0 : 1,
|
||||
KeepAlive : 16/big,
|
||||
Rest2/binary>> = Rest1,
|
||||
|
||||
{Properties, Rest3} = parse_properties(Rest2, ProtoVer),
|
||||
{ClientId, Rest4} = parse_utf8_string(Rest3),
|
||||
ConnPacket = #mqtt_packet_connect{proto_name = ProtoName,
|
||||
proto_ver = ProtoVer,
|
||||
is_bridge = (BridgeTag =:= 8),
|
||||
clean_start = bool(CleanStart),
|
||||
will_flag = bool(WillFlag),
|
||||
will_qos = WillQoS,
|
||||
will_retain = bool(WillRetain),
|
||||
keepalive = KeepAlive,
|
||||
properties = Properties,
|
||||
clientid = ClientId
|
||||
},
|
||||
{ConnPacket1, Rest5} = parse_will_message(ConnPacket, Rest4),
|
||||
{Username, Rest6} = parse_utf8_string(Rest5, bool(UsernameFlag)),
|
||||
{Passsword, <<>>} = parse_utf8_string(Rest6, bool(PasswordFlag)),
|
||||
ConnPacket1#mqtt_packet_connect{username = Username, password = Passsword};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?CONNACK},
|
||||
<<AckFlags:8, ReasonCode:8, Rest/binary>>, #{version := Ver}) ->
|
||||
{Properties, <<>>} = parse_properties(Rest, Ver),
|
||||
#mqtt_packet_connack{ack_flags = AckFlags,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?PUBLISH, qos = QoS}, Bin,
|
||||
#{strict_mode := StrictMode, version := Ver}) ->
|
||||
{TopicName, Rest} = parse_utf8_string(Bin),
|
||||
{PacketId, Rest1} = case QoS of
|
||||
?QOS_0 -> {undefined, Rest};
|
||||
_ -> parse_packet_id(Rest)
|
||||
end,
|
||||
(PacketId =/= undefined) andalso
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, Payload} = parse_properties(Rest1, Ver),
|
||||
Publish = #mqtt_packet_publish{topic_name = TopicName,
|
||||
packet_id = PacketId,
|
||||
properties = Properties
|
||||
},
|
||||
{Publish, Payload};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = PubAck}, <<PacketId:16/big>>, #{strict_mode := StrictMode})
|
||||
when ?PUBACK =< PubAck, PubAck =< ?PUBCOMP ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
#mqtt_packet_puback{packet_id = PacketId, reason_code = 0};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = PubAck}, <<PacketId:16/big, ReasonCode, Rest/binary>>,
|
||||
#{strict_mode := StrictMode, version := Ver = ?MQTT_PROTO_V5})
|
||||
when ?PUBACK =< PubAck, PubAck =< ?PUBCOMP ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, <<>>} = parse_properties(Rest, Ver),
|
||||
#mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?SUBSCRIBE}, <<PacketId:16/big, Rest/binary>>,
|
||||
#{strict_mode := StrictMode, version := Ver}) ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, Rest1} = parse_properties(Rest, Ver),
|
||||
TopicFilters = parse_topic_filters(subscribe, Rest1),
|
||||
ok = validate_subqos([QoS || {_, #{qos := QoS}} <- TopicFilters]),
|
||||
#mqtt_packet_subscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?SUBACK}, <<PacketId:16/big, Rest/binary>>,
|
||||
#{strict_mode := StrictMode, version := Ver}) ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, Rest1} = parse_properties(Rest, Ver),
|
||||
ReasonCodes = parse_reason_codes(Rest1),
|
||||
#mqtt_packet_suback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?UNSUBSCRIBE}, <<PacketId:16/big, Rest/binary>>,
|
||||
#{strict_mode := StrictMode, version := Ver}) ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, Rest1} = parse_properties(Rest, Ver),
|
||||
TopicFilters = parse_topic_filters(unsubscribe, Rest1),
|
||||
#mqtt_packet_unsubscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?UNSUBACK}, <<PacketId:16/big>>,
|
||||
#{strict_mode := StrictMode}) ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
#mqtt_packet_unsuback{packet_id = PacketId};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?UNSUBACK}, <<PacketId:16/big, Rest/binary>>,
|
||||
#{strict_mode := StrictMode, version := Ver}) ->
|
||||
StrictMode andalso validate_packet_id(PacketId),
|
||||
{Properties, Rest1} = parse_properties(Rest, Ver),
|
||||
ReasonCodes = parse_reason_codes(Rest1),
|
||||
#mqtt_packet_unsuback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?DISCONNECT}, <<ReasonCode, Rest/binary>>,
|
||||
#{version := ?MQTT_PROTO_V5}) ->
|
||||
{Properties, <<>>} = parse_properties(Rest, ?MQTT_PROTO_V5),
|
||||
#mqtt_packet_disconnect{reason_code = ReasonCode,
|
||||
properties = Properties
|
||||
};
|
||||
|
||||
parse_packet(#mqtt_packet_header{type = ?AUTH}, <<ReasonCode, Rest/binary>>,
|
||||
#{version := ?MQTT_PROTO_V5}) ->
|
||||
{Properties, <<>>} = parse_properties(Rest, ?MQTT_PROTO_V5),
|
||||
#mqtt_packet_auth{reason_code = ReasonCode, properties = Properties}.
|
||||
|
||||
parse_will_message(Packet = #mqtt_packet_connect{will_flag = true,
|
||||
proto_ver = Ver}, Bin) ->
|
||||
{Props, Rest} = parse_properties(Bin, Ver),
|
||||
{Topic, Rest1} = parse_utf8_string(Rest),
|
||||
{Payload, Rest2} = parse_binary_data(Rest1),
|
||||
{Packet#mqtt_packet_connect{will_props = Props,
|
||||
will_topic = Topic,
|
||||
will_payload = Payload
|
||||
}, Rest2};
|
||||
parse_will_message(Packet, Bin) -> {Packet, Bin}.
|
||||
|
||||
-compile({inline, [parse_packet_id/1]}).
|
||||
parse_packet_id(<<PacketId:16/big, Rest/binary>>) ->
|
||||
{PacketId, Rest}.
|
||||
|
||||
parse_properties(Bin, Ver) when Ver =/= ?MQTT_PROTO_V5 ->
|
||||
{undefined, Bin};
|
||||
%% TODO: version mess?
|
||||
parse_properties(<<>>, ?MQTT_PROTO_V5) ->
|
||||
{#{}, <<>>};
|
||||
parse_properties(<<0, Rest/binary>>, ?MQTT_PROTO_V5) ->
|
||||
{#{}, Rest};
|
||||
parse_properties(Bin, ?MQTT_PROTO_V5) ->
|
||||
{Len, Rest} = parse_variable_byte_integer(Bin),
|
||||
<<PropsBin:Len/binary, Rest1/binary>> = Rest,
|
||||
{parse_property(PropsBin, #{}), Rest1}.
|
||||
|
||||
parse_property(<<>>, Props) ->
|
||||
Props;
|
||||
parse_property(<<16#01, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Payload-Format-Indicator' => Val});
|
||||
parse_property(<<16#02, Val:32/big, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Message-Expiry-Interval' => Val});
|
||||
parse_property(<<16#03, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Content-Type' => Val});
|
||||
parse_property(<<16#08, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Response-Topic' => Val});
|
||||
parse_property(<<16#09, Len:16/big, Val:Len/binary, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Correlation-Data' => Val});
|
||||
parse_property(<<16#0B, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_variable_byte_integer(Bin),
|
||||
parse_property(Rest, Props#{'Subscription-Identifier' => Val});
|
||||
parse_property(<<16#11, Val:32/big, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Session-Expiry-Interval' => Val});
|
||||
parse_property(<<16#12, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Assigned-Client-Identifier' => Val});
|
||||
parse_property(<<16#13, Val:16, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Server-Keep-Alive' => Val});
|
||||
parse_property(<<16#15, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Authentication-Method' => Val});
|
||||
parse_property(<<16#16, Len:16/big, Val:Len/binary, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Authentication-Data' => Val});
|
||||
parse_property(<<16#17, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Request-Problem-Information' => Val});
|
||||
parse_property(<<16#18, Val:32, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Will-Delay-Interval' => Val});
|
||||
parse_property(<<16#19, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Request-Response-Information' => Val});
|
||||
parse_property(<<16#1A, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Response-Information' => Val});
|
||||
parse_property(<<16#1C, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Server-Reference' => Val});
|
||||
parse_property(<<16#1F, Bin/binary>>, Props) ->
|
||||
{Val, Rest} = parse_utf8_string(Bin),
|
||||
parse_property(Rest, Props#{'Reason-String' => Val});
|
||||
parse_property(<<16#21, Val:16/big, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Receive-Maximum' => Val});
|
||||
parse_property(<<16#22, Val:16/big, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Topic-Alias-Maximum' => Val});
|
||||
parse_property(<<16#23, Val:16/big, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Topic-Alias' => Val});
|
||||
parse_property(<<16#24, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Maximum-QoS' => Val});
|
||||
parse_property(<<16#25, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Retain-Available' => Val});
|
||||
parse_property(<<16#26, Bin/binary>>, Props) ->
|
||||
{Pair, Rest} = parse_utf8_pair(Bin),
|
||||
case maps:find('User-Property', Props) of
|
||||
{ok, UserProps} ->
|
||||
UserProps1 = lists:append(UserProps, [Pair]),
|
||||
parse_property(Rest, Props#{'User-Property' := UserProps1});
|
||||
error ->
|
||||
parse_property(Rest, Props#{'User-Property' => [Pair]})
|
||||
end;
|
||||
parse_property(<<16#27, Val:32, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Maximum-Packet-Size' => Val});
|
||||
parse_property(<<16#28, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Wildcard-Subscription-Available' => Val});
|
||||
parse_property(<<16#29, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Subscription-Identifier-Available' => Val});
|
||||
parse_property(<<16#2A, Val, Bin/binary>>, Props) ->
|
||||
parse_property(Bin, Props#{'Shared-Subscription-Available' => Val}).
|
||||
|
||||
parse_variable_byte_integer(Bin) ->
|
||||
parse_variable_byte_integer(Bin, 1, 0).
|
||||
parse_variable_byte_integer(<<1:1, Len:7, Rest/binary>>, Multiplier, Value) ->
|
||||
parse_variable_byte_integer(Rest, Multiplier * ?HIGHBIT, Value + Len * Multiplier);
|
||||
parse_variable_byte_integer(<<0:1, Len:7, Rest/binary>>, Multiplier, Value) ->
|
||||
{Value + Len * Multiplier, Rest}.
|
||||
|
||||
parse_topic_filters(subscribe, Bin) ->
|
||||
[{Topic, #{rh => Rh, rap => Rap, nl => Nl, qos => QoS}}
|
||||
|| <<Len:16/big, Topic:Len/binary, _:2, Rh:2, Rap:1, Nl:1, QoS:2>> <= Bin];
|
||||
|
||||
parse_topic_filters(unsubscribe, Bin) ->
|
||||
[Topic || <<Len:16/big, Topic:Len/binary>> <= Bin].
|
||||
|
||||
parse_reason_codes(Bin) ->
|
||||
[Code || <<Code>> <= Bin].
|
||||
|
||||
parse_utf8_pair(<<Len1:16/big, Key:Len1/binary,
|
||||
Len2:16/big, Val:Len2/binary, Rest/binary>>) ->
|
||||
{{Key, Val}, Rest}.
|
||||
|
||||
parse_utf8_string(Bin, false) ->
|
||||
{undefined, Bin};
|
||||
parse_utf8_string(Bin, true) ->
|
||||
parse_utf8_string(Bin).
|
||||
|
||||
parse_utf8_string(<<Len:16/big, Str:Len/binary, Rest/binary>>) ->
|
||||
{Str, Rest}.
|
||||
|
||||
parse_binary_data(<<Len:16/big, Data:Len/binary, Rest/binary>>) ->
|
||||
{Data, Rest}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Serialize MQTT Packet
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
serialize_fun() -> serialize_fun(?DEFAULT_OPTIONS).
|
||||
|
||||
serialize_fun(#mqtt_packet_connect{proto_ver = ProtoVer, properties = ConnProps}) ->
|
||||
MaxSize = get_property('Maximum-Packet-Size', ConnProps, ?MAX_PACKET_SIZE),
|
||||
serialize_fun(#{version => ProtoVer, max_size => MaxSize});
|
||||
|
||||
serialize_fun(#{version := Ver, max_size := MaxSize}) ->
|
||||
fun(Packet) ->
|
||||
IoData = serialize(Packet, Ver),
|
||||
case is_too_large(IoData, MaxSize) of
|
||||
true -> <<>>;
|
||||
false -> IoData
|
||||
end
|
||||
end.
|
||||
|
||||
-spec(serialize(#mqtt_packet{}) -> iodata()).
|
||||
serialize(Packet) -> serialize(Packet, ?MQTT_PROTO_V4).
|
||||
|
||||
-spec(serialize(#mqtt_packet{}, version()) -> iodata()).
|
||||
serialize(#mqtt_packet{header = Header,
|
||||
variable = Variable,
|
||||
payload = Payload}, Ver) ->
|
||||
serialize(Header, serialize_variable(Variable, Ver), serialize_payload(Payload)).
|
||||
|
||||
serialize(#mqtt_packet_header{type = Type,
|
||||
dup = Dup,
|
||||
qos = QoS,
|
||||
retain = Retain
|
||||
}, VariableBin, PayloadBin)
|
||||
when ?CONNECT =< Type andalso Type =< ?AUTH ->
|
||||
Len = iolist_size(VariableBin) + iolist_size(PayloadBin),
|
||||
[<<Type:4, (flag(Dup)):1, (flag(QoS)):2, (flag(Retain)):1>>,
|
||||
serialize_remaining_len(Len), VariableBin, PayloadBin].
|
||||
|
||||
serialize_variable(#mqtt_packet_connect{
|
||||
proto_name = ProtoName,
|
||||
proto_ver = ProtoVer,
|
||||
is_bridge = IsBridge,
|
||||
clean_start = CleanStart,
|
||||
will_flag = WillFlag,
|
||||
will_qos = WillQoS,
|
||||
will_retain = WillRetain,
|
||||
keepalive = KeepAlive,
|
||||
properties = Properties,
|
||||
clientid = ClientId,
|
||||
will_props = WillProps,
|
||||
will_topic = WillTopic,
|
||||
will_payload = WillPayload,
|
||||
username = Username,
|
||||
password = Password}, _Ver) ->
|
||||
[serialize_binary_data(ProtoName),
|
||||
<<(case IsBridge of
|
||||
true -> 16#80 + ProtoVer;
|
||||
false -> ProtoVer
|
||||
end):8,
|
||||
(flag(Username)):1,
|
||||
(flag(Password)):1,
|
||||
(flag(WillRetain)):1,
|
||||
WillQoS:2,
|
||||
(flag(WillFlag)):1,
|
||||
(flag(CleanStart)):1,
|
||||
0:1,
|
||||
KeepAlive:16/big-unsigned-integer>>,
|
||||
serialize_properties(Properties, ProtoVer),
|
||||
serialize_utf8_string(ClientId),
|
||||
case WillFlag of
|
||||
true -> [serialize_properties(WillProps, ProtoVer),
|
||||
serialize_utf8_string(WillTopic),
|
||||
serialize_binary_data(WillPayload)];
|
||||
false -> <<>>
|
||||
end,
|
||||
serialize_utf8_string(Username, true),
|
||||
serialize_utf8_string(Password, true)];
|
||||
|
||||
serialize_variable(#mqtt_packet_connack{ack_flags = AckFlags,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties}, Ver) ->
|
||||
[AckFlags, ReasonCode, serialize_properties(Properties, Ver)];
|
||||
|
||||
serialize_variable(#mqtt_packet_publish{topic_name = TopicName,
|
||||
packet_id = PacketId,
|
||||
properties = Properties}, Ver) ->
|
||||
[serialize_utf8_string(TopicName),
|
||||
if
|
||||
PacketId =:= undefined -> <<>>;
|
||||
true -> <<PacketId:16/big-unsigned-integer>>
|
||||
end,
|
||||
serialize_properties(Properties, Ver)];
|
||||
|
||||
serialize_variable(#mqtt_packet_puback{packet_id = PacketId}, Ver)
|
||||
when Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4 ->
|
||||
<<PacketId:16/big-unsigned-integer>>;
|
||||
serialize_variable(#mqtt_packet_puback{packet_id = PacketId,
|
||||
reason_code = ReasonCode,
|
||||
properties = Properties
|
||||
},
|
||||
Ver = ?MQTT_PROTO_V5) ->
|
||||
[<<PacketId:16/big-unsigned-integer>>, ReasonCode,
|
||||
serialize_properties(Properties, Ver)];
|
||||
|
||||
serialize_variable(#mqtt_packet_subscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters}, Ver) ->
|
||||
[<<PacketId:16/big-unsigned-integer>>, serialize_properties(Properties, Ver),
|
||||
serialize_topic_filters(subscribe, TopicFilters, Ver)];
|
||||
|
||||
serialize_variable(#mqtt_packet_suback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes}, Ver) ->
|
||||
[<<PacketId:16/big-unsigned-integer>>, serialize_properties(Properties, Ver),
|
||||
serialize_reason_codes(ReasonCodes)];
|
||||
|
||||
serialize_variable(#mqtt_packet_unsubscribe{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
topic_filters = TopicFilters}, Ver) ->
|
||||
[<<PacketId:16/big-unsigned-integer>>, serialize_properties(Properties, Ver),
|
||||
serialize_topic_filters(unsubscribe, TopicFilters, Ver)];
|
||||
|
||||
serialize_variable(#mqtt_packet_unsuback{packet_id = PacketId,
|
||||
properties = Properties,
|
||||
reason_codes = ReasonCodes}, Ver) ->
|
||||
[<<PacketId:16/big-unsigned-integer>>, serialize_properties(Properties, Ver),
|
||||
serialize_reason_codes(ReasonCodes)];
|
||||
|
||||
serialize_variable(#mqtt_packet_disconnect{}, Ver)
|
||||
when Ver == ?MQTT_PROTO_V3; Ver == ?MQTT_PROTO_V4 ->
|
||||
<<>>;
|
||||
|
||||
serialize_variable(#mqtt_packet_disconnect{reason_code = ReasonCode,
|
||||
properties = Properties},
|
||||
Ver = ?MQTT_PROTO_V5) ->
|
||||
[ReasonCode, serialize_properties(Properties, Ver)];
|
||||
serialize_variable(#mqtt_packet_disconnect{}, _Ver) ->
|
||||
<<>>;
|
||||
|
||||
serialize_variable(#mqtt_packet_auth{reason_code = ReasonCode,
|
||||
properties = Properties},
|
||||
Ver = ?MQTT_PROTO_V5) ->
|
||||
[ReasonCode, serialize_properties(Properties, Ver)];
|
||||
|
||||
serialize_variable(PacketId, ?MQTT_PROTO_V3) when is_integer(PacketId) ->
|
||||
<<PacketId:16/big-unsigned-integer>>;
|
||||
serialize_variable(PacketId, ?MQTT_PROTO_V4) when is_integer(PacketId) ->
|
||||
<<PacketId:16/big-unsigned-integer>>;
|
||||
serialize_variable(undefined, _Ver) ->
|
||||
<<>>.
|
||||
|
||||
serialize_payload(undefined) -> <<>>;
|
||||
serialize_payload(Bin) -> Bin.
|
||||
|
||||
serialize_properties(_Props, Ver) when Ver =/= ?MQTT_PROTO_V5 ->
|
||||
<<>>;
|
||||
serialize_properties(Props, ?MQTT_PROTO_V5) ->
|
||||
serialize_properties(Props).
|
||||
|
||||
serialize_properties(undefined) ->
|
||||
<<0>>;
|
||||
serialize_properties(Props) when map_size(Props) == 0 ->
|
||||
<<0>>;
|
||||
serialize_properties(Props) when is_map(Props) ->
|
||||
Bin = << <<(serialize_property(Prop, Val))/binary>> || {Prop, Val} <- maps:to_list(Props) >>,
|
||||
[serialize_variable_byte_integer(byte_size(Bin)), Bin].
|
||||
|
||||
serialize_property(_, undefined) ->
|
||||
<<>>;
|
||||
serialize_property('Payload-Format-Indicator', Val) ->
|
||||
<<16#01, Val>>;
|
||||
serialize_property('Message-Expiry-Interval', Val) ->
|
||||
<<16#02, Val:32/big>>;
|
||||
serialize_property('Content-Type', Val) ->
|
||||
<<16#03, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Response-Topic', Val) ->
|
||||
<<16#08, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Correlation-Data', Val) ->
|
||||
<<16#09, (byte_size(Val)):16, Val/binary>>;
|
||||
serialize_property('Subscription-Identifier', Val) ->
|
||||
<<16#0B, (serialize_variable_byte_integer(Val))/binary>>;
|
||||
serialize_property('Session-Expiry-Interval', Val) ->
|
||||
<<16#11, Val:32/big>>;
|
||||
serialize_property('Assigned-Client-Identifier', Val) ->
|
||||
<<16#12, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Server-Keep-Alive', Val) ->
|
||||
<<16#13, Val:16/big>>;
|
||||
serialize_property('Authentication-Method', Val) ->
|
||||
<<16#15, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Authentication-Data', Val) ->
|
||||
<<16#16, (iolist_size(Val)):16, Val/binary>>;
|
||||
serialize_property('Request-Problem-Information', Val) ->
|
||||
<<16#17, Val>>;
|
||||
serialize_property('Will-Delay-Interval', Val) ->
|
||||
<<16#18, Val:32/big>>;
|
||||
serialize_property('Request-Response-Information', Val) ->
|
||||
<<16#19, Val>>;
|
||||
serialize_property('Response-Information', Val) ->
|
||||
<<16#1A, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Server-Reference', Val) ->
|
||||
<<16#1C, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Reason-String', Val) ->
|
||||
<<16#1F, (serialize_utf8_string(Val))/binary>>;
|
||||
serialize_property('Receive-Maximum', Val) ->
|
||||
<<16#21, Val:16/big>>;
|
||||
serialize_property('Topic-Alias-Maximum', Val) ->
|
||||
<<16#22, Val:16/big>>;
|
||||
serialize_property('Topic-Alias', Val) ->
|
||||
<<16#23, Val:16/big>>;
|
||||
serialize_property('Maximum-QoS', Val) ->
|
||||
<<16#24, Val>>;
|
||||
serialize_property('Retain-Available', Val) ->
|
||||
<<16#25, Val>>;
|
||||
serialize_property('User-Property', {Key, Val}) ->
|
||||
<<16#26, (serialize_utf8_pair({Key, Val}))/binary>>;
|
||||
serialize_property('User-Property', Props) when is_list(Props) ->
|
||||
<< <<(serialize_property('User-Property', {Key, Val}))/binary>>
|
||||
|| {Key, Val} <- Props >>;
|
||||
serialize_property('Maximum-Packet-Size', Val) ->
|
||||
<<16#27, Val:32/big>>;
|
||||
serialize_property('Wildcard-Subscription-Available', Val) ->
|
||||
<<16#28, Val>>;
|
||||
serialize_property('Subscription-Identifier-Available', Val) ->
|
||||
<<16#29, Val>>;
|
||||
serialize_property('Shared-Subscription-Available', Val) ->
|
||||
<<16#2A, Val>>.
|
||||
|
||||
serialize_topic_filters(subscribe, TopicFilters, ?MQTT_PROTO_V5) ->
|
||||
<< <<(serialize_utf8_string(Topic))/binary,
|
||||
?RESERVED:2, Rh:2, (flag(Rap)):1,(flag(Nl)):1, QoS:2 >>
|
||||
|| {Topic, #{rh := Rh, rap := Rap, nl := Nl, qos := QoS}} <- TopicFilters >>;
|
||||
|
||||
serialize_topic_filters(subscribe, TopicFilters, _Ver) ->
|
||||
<< <<(serialize_utf8_string(Topic))/binary, ?RESERVED:6, QoS:2>>
|
||||
|| {Topic, #{qos := QoS}} <- TopicFilters >>;
|
||||
|
||||
serialize_topic_filters(unsubscribe, TopicFilters, _Ver) ->
|
||||
<< <<(serialize_utf8_string(Topic))/binary>> || Topic <- TopicFilters >>.
|
||||
|
||||
serialize_reason_codes(undefined) ->
|
||||
<<>>;
|
||||
serialize_reason_codes(ReasonCodes) when is_list(ReasonCodes) ->
|
||||
<< <<Code>> || Code <- ReasonCodes >>.
|
||||
|
||||
serialize_utf8_pair({Name, Value}) ->
|
||||
<< (serialize_utf8_string(Name))/binary, (serialize_utf8_string(Value))/binary >>.
|
||||
|
||||
serialize_binary_data(Bin) ->
|
||||
[<<(byte_size(Bin)):16/big-unsigned-integer>>, Bin].
|
||||
|
||||
serialize_utf8_string(undefined, false) ->
|
||||
error(utf8_string_undefined);
|
||||
serialize_utf8_string(undefined, true) ->
|
||||
<<>>;
|
||||
serialize_utf8_string(String, _AllowNull) ->
|
||||
serialize_utf8_string(String).
|
||||
|
||||
serialize_utf8_string(String) ->
|
||||
StringBin = unicode:characters_to_binary(String),
|
||||
Len = byte_size(StringBin),
|
||||
true = (Len =< 16#ffff),
|
||||
<<Len:16/big, StringBin/binary>>.
|
||||
|
||||
serialize_remaining_len(I) ->
|
||||
serialize_variable_byte_integer(I).
|
||||
|
||||
serialize_variable_byte_integer(N) when N =< ?LOWBITS ->
|
||||
<<0:1, N:7>>;
|
||||
serialize_variable_byte_integer(N) ->
|
||||
<<1:1, (N rem ?HIGHBIT):7, (serialize_variable_byte_integer(N div ?HIGHBIT))/binary>>.
|
||||
|
||||
%% Is the frame too large?
|
||||
-spec(is_too_large(iodata(), pos_integer()) -> boolean()).
|
||||
is_too_large(IoData, MaxSize) ->
|
||||
iolist_size(IoData) >= MaxSize.
|
||||
|
||||
get_property(_Key, undefined, Default) ->
|
||||
Default;
|
||||
get_property(Key, Props, Default) ->
|
||||
maps:get(Key, Props, Default).
|
||||
|
||||
%% Validate header if sctrict mode. See: mqtt-v5.0: 2.1.3 Flags
|
||||
validate_header(?CONNECT, 0, 0, 0) -> ok;
|
||||
validate_header(?CONNACK, 0, 0, 0) -> ok;
|
||||
validate_header(?PUBLISH, 0, ?QOS_0, _) -> ok;
|
||||
validate_header(?PUBLISH, _, ?QOS_1, _) -> ok;
|
||||
validate_header(?PUBLISH, 0, ?QOS_2, _) -> ok;
|
||||
validate_header(?PUBACK, 0, 0, 0) -> ok;
|
||||
validate_header(?PUBREC, 0, 0, 0) -> ok;
|
||||
validate_header(?PUBREL, 0, 1, 0) -> ok;
|
||||
validate_header(?PUBCOMP, 0, 0, 0) -> ok;
|
||||
validate_header(?SUBSCRIBE, 0, 1, 0) -> ok;
|
||||
validate_header(?SUBACK, 0, 0, 0) -> ok;
|
||||
validate_header(?UNSUBSCRIBE, 0, 1, 0) -> ok;
|
||||
validate_header(?UNSUBACK, 0, 0, 0) -> ok;
|
||||
validate_header(?PINGREQ, 0, 0, 0) -> ok;
|
||||
validate_header(?PINGRESP, 0, 0, 0) -> ok;
|
||||
validate_header(?DISCONNECT, 0, 0, 0) -> ok;
|
||||
validate_header(?AUTH, 0, 0, 0) -> ok;
|
||||
validate_header(_Type, _Dup, _QoS, _Rt) -> error(bad_frame_header).
|
||||
|
||||
-compile({inline, [validate_packet_id/1]}).
|
||||
validate_packet_id(0) -> error(bad_packet_id);
|
||||
validate_packet_id(_) -> ok.
|
||||
|
||||
validate_subqos([3|_]) -> error(bad_subqos);
|
||||
validate_subqos([_|T]) -> validate_subqos(T);
|
||||
validate_subqos([]) -> ok.
|
||||
|
||||
bool(0) -> false;
|
||||
bool(1) -> true.
|
||||
|
||||
flag(undefined) -> ?RESERVED;
|
||||
flag(false) -> 0;
|
||||
flag(true) -> 1;
|
||||
flag(X) when is_integer(X) -> X;
|
||||
flag(B) when is_binary(B) -> 1.
|
||||
|
||||
fixqos(?PUBREL, 0) -> 1;
|
||||
fixqos(?SUBSCRIBE, 0) -> 1;
|
||||
fixqos(?UNSUBSCRIBE, 0) -> 1;
|
||||
fixqos(_Type, QoS) -> QoS.
|
||||
|
||||
172
apps/iot/src/emqtt/emqtt_props.erl
Normal file
172
apps/iot/src/emqtt/emqtt_props.erl
Normal file
@ -0,0 +1,172 @@
|
||||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
%% @doc MQTT5 Properties
|
||||
-module(emqtt_props).
|
||||
|
||||
-include("emqtt.hrl").
|
||||
|
||||
-export([id/1, name/1, filter/2, validate/1]).
|
||||
|
||||
%% For tests
|
||||
-export([all/0]).
|
||||
|
||||
-type(prop_name() :: atom()).
|
||||
-type(prop_id() :: pos_integer()).
|
||||
|
||||
%% MQTT v5.0 property table: Identifier => {Name, Wire-Type, Packets}.
%% The third element lists the packet types the property may appear in;
%% 'ALL' means any packet, and 'WILL' marks a property that only occurs in
%% the will-properties section of a CONNECT payload.
-define(PROPS_TABLE,
    #{16#01 => {'Payload-Format-Indicator', 'Byte', [?PUBLISH]},
      16#02 => {'Message-Expiry-Interval', 'Four-Byte-Integer', [?PUBLISH]},
      16#03 => {'Content-Type', 'UTF8-Encoded-String', [?PUBLISH]},
      16#08 => {'Response-Topic', 'UTF8-Encoded-String', [?PUBLISH]},
      16#09 => {'Correlation-Data', 'Binary-Data', [?PUBLISH]},
      16#0B => {'Subscription-Identifier', 'Variable-Byte-Integer', [?PUBLISH, ?SUBSCRIBE]},
      16#11 => {'Session-Expiry-Interval', 'Four-Byte-Integer', [?CONNECT, ?CONNACK, ?DISCONNECT]},
      16#12 => {'Assigned-Client-Identifier', 'UTF8-Encoded-String', [?CONNACK]},
      16#13 => {'Server-Keep-Alive', 'Two-Byte-Integer', [?CONNACK]},
      16#15 => {'Authentication-Method', 'UTF8-Encoded-String', [?CONNECT, ?CONNACK, ?AUTH]},
      16#16 => {'Authentication-Data', 'Binary-Data', [?CONNECT, ?CONNACK, ?AUTH]},
      16#17 => {'Request-Problem-Information', 'Byte', [?CONNECT]},
      16#18 => {'Will-Delay-Interval', 'Four-Byte-Integer', ['WILL']},
      16#19 => {'Request-Response-Information', 'Byte', [?CONNECT]},
      16#1A => {'Response-Information', 'UTF8-Encoded-String', [?CONNACK]},
      16#1C => {'Server-Reference', 'UTF8-Encoded-String', [?CONNACK, ?DISCONNECT]},
      16#1F => {'Reason-String', 'UTF8-Encoded-String', [?CONNACK, ?DISCONNECT, ?PUBACK,
                                                         ?PUBREC, ?PUBREL, ?PUBCOMP,
                                                         ?SUBACK, ?UNSUBACK, ?AUTH]},
      16#21 => {'Receive-Maximum', 'Two-Byte-Integer', [?CONNECT, ?CONNACK]},
      16#22 => {'Topic-Alias-Maximum', 'Two-Byte-Integer', [?CONNECT, ?CONNACK]},
      16#23 => {'Topic-Alias', 'Two-Byte-Integer', [?PUBLISH]},
      16#24 => {'Maximum-QoS', 'Byte', [?CONNACK]},
      16#25 => {'Retain-Available', 'Byte', [?CONNACK]},
      16#26 => {'User-Property', 'UTF8-String-Pair', 'ALL'},
      16#27 => {'Maximum-Packet-Size', 'Four-Byte-Integer', [?CONNECT, ?CONNACK]},
      16#28 => {'Wildcard-Subscription-Available', 'Byte', [?CONNACK]},
      16#29 => {'Subscription-Identifier-Available', 'Byte', [?CONNACK]},
      16#2A => {'Shared-Subscription-Available', 'Byte', [?CONNACK]}
     }).
|
||||
|
||||
-spec(id(prop_name()) -> prop_id()).
%% Map a property name to its MQTT v5.0 wire identifier (inverse of name/1,
%% mirroring ?PROPS_TABLE). Raises {bad_property, Name} for unknown names.
id('Payload-Format-Indicator') -> 16#01;
id('Message-Expiry-Interval') -> 16#02;
id('Content-Type') -> 16#03;
id('Response-Topic') -> 16#08;
id('Correlation-Data') -> 16#09;
id('Subscription-Identifier') -> 16#0B;
id('Session-Expiry-Interval') -> 16#11;
id('Assigned-Client-Identifier') -> 16#12;
id('Server-Keep-Alive') -> 16#13;
id('Authentication-Method') -> 16#15;
id('Authentication-Data') -> 16#16;
id('Request-Problem-Information') -> 16#17;
id('Will-Delay-Interval') -> 16#18;
id('Request-Response-Information') -> 16#19;
id('Response-Information') -> 16#1A;
id('Server-Reference') -> 16#1C;
id('Reason-String') -> 16#1F;
id('Receive-Maximum') -> 16#21;
id('Topic-Alias-Maximum') -> 16#22;
id('Topic-Alias') -> 16#23;
id('Maximum-QoS') -> 16#24;
id('Retain-Available') -> 16#25;
id('User-Property') -> 16#26;
id('Maximum-Packet-Size') -> 16#27;
id('Wildcard-Subscription-Available') -> 16#28;
id('Subscription-Identifier-Available') -> 16#29;
id('Shared-Subscription-Available') -> 16#2A;
id(Name) -> error({bad_property, Name}).
|
||||
|
||||
-spec(name(prop_id()) -> prop_name()).
%% Map an MQTT v5.0 wire identifier to its property name (inverse of id/1).
%% Raises {unsupported_property, Id} for unknown identifiers.
name(16#01) -> 'Payload-Format-Indicator';
name(16#02) -> 'Message-Expiry-Interval';
name(16#03) -> 'Content-Type';
name(16#08) -> 'Response-Topic';
name(16#09) -> 'Correlation-Data';
name(16#0B) -> 'Subscription-Identifier';
name(16#11) -> 'Session-Expiry-Interval';
name(16#12) -> 'Assigned-Client-Identifier';
name(16#13) -> 'Server-Keep-Alive';
name(16#15) -> 'Authentication-Method';
name(16#16) -> 'Authentication-Data';
name(16#17) -> 'Request-Problem-Information';
name(16#18) -> 'Will-Delay-Interval';
name(16#19) -> 'Request-Response-Information';
name(16#1A) -> 'Response-Information';
name(16#1C) -> 'Server-Reference';
name(16#1F) -> 'Reason-String';
name(16#21) -> 'Receive-Maximum';
name(16#22) -> 'Topic-Alias-Maximum';
name(16#23) -> 'Topic-Alias';
name(16#24) -> 'Maximum-QoS';
name(16#25) -> 'Retain-Available';
name(16#26) -> 'User-Property';
name(16#27) -> 'Maximum-Packet-Size';
name(16#28) -> 'Wildcard-Subscription-Available';
name(16#29) -> 'Subscription-Identifier-Available';
name(16#2A) -> 'Shared-Subscription-Available';
name(Id) -> error({unsupported_property, Id}).
|
||||
|
||||
%% Drop every property that is not allowed to appear in the given packet
%% type. Accepts either a property map or a {Name, Value} list; non-tuple
%% list elements are silently dropped by the comprehension pattern.
filter(PacketType, Props) when is_map(Props) ->
    maps:from_list(filter(PacketType, maps:to_list(Props)));
filter(PacketType, Props) when ?CONNECT =< PacketType, PacketType =< ?AUTH, is_list(Props) ->
    IsAllowed =
        fun(Name) ->
            case maps:find(id(Name), ?PROPS_TABLE) of
                {ok, {Name, _Type, 'ALL'}} -> true;
                {ok, {Name, _Type, AllowedTypes}} -> lists:member(PacketType, AllowedTypes);
                error -> false
            end
        end,
    [Prop || Prop = {Name, _} <- Props, IsAllowed(Name)].
|
||||
|
||||
%% Validate every entry of a property map; raises {bad_property, _} (via
%% validate_prop/1) on the first offending entry, returns ok otherwise.
validate(Props) when is_map(Props) ->
    _ = [validate_prop(Prop) || Prop <- maps:to_list(Props)],
    ok.
|
||||
|
||||
%% Validate a single {Name, Value} pair against the property table.
%% Raises error({bad_property, Prop}) for unknown names or ill-typed values.
validate_prop(Prop = {Name, Val}) ->
    case maps:find(id(Name), ?PROPS_TABLE) of
        {ok, {Name, Type, _}} ->
            %% Fixed: was `error(bad_property, Prop)` — erlang:error/2 treats
            %% its second argument as the call's argument list for the
            %% stacktrace, not as part of the reason. Raise the same
            %% {bad_property, Prop} tuple as the unknown-name branch below.
            validate_value(Type, Val)
                orelse error({bad_property, Prop});
        error ->
            error({bad_property, Prop})
    end.
|
||||
|
||||
%% Type-check a property value against its declared MQTT wire type.
%% Returns a boolean; unknown types are invalid.
validate_value('Byte', Val) ->
    %% Fixed: also reject negative values — a Byte is 0..255, consistent
    %% with the range checks on the other integer types below.
    is_integer(Val) andalso 0 =< Val andalso Val =< 16#FF;
validate_value('Two-Byte-Integer', Val) ->
    is_integer(Val) andalso 0 =< Val andalso Val =< 16#FFFF;
validate_value('Four-Byte-Integer', Val) ->
    is_integer(Val) andalso 0 =< Val andalso Val =< 16#FFFFFFFF;
validate_value('Variable-Byte-Integer', Val) ->
    is_integer(Val) andalso 0 =< Val andalso Val =< 16#7FFFFFFF;
validate_value('UTF8-String-Pair', {Name, Val}) ->
    validate_value('UTF8-Encoded-String', Name)
        andalso validate_value('UTF8-Encoded-String', Val);
validate_value('UTF8-String-Pair', Pairs) when is_list(Pairs) ->
    %% A list of pairs is valid only if every element is a valid pair;
    %% andalso short-circuits once one element fails.
    lists:foldl(fun(Pair, OK) ->
                    OK andalso validate_value('UTF8-String-Pair', Pair)
                end, true, Pairs);
validate_value('UTF8-Encoded-String', Val) ->
    is_binary(Val);
validate_value('Binary-Data', Val) ->
    is_binary(Val);
validate_value(_Type, _Val) -> false.
|
||||
|
||||
-spec(all() -> map()).
%% Expose the raw property table (test helper).
all() -> ?PROPS_TABLE.
|
||||
|
||||
120
apps/iot/src/emqtt/emqtt_sock.erl
Normal file
120
apps/iot/src/emqtt/emqtt_sock.erl
Normal file
@ -0,0 +1,120 @@
|
||||
%%--------------------------------------------------------------------
|
||||
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
|
||||
%%
|
||||
%% Licensed under the Apache License, Version 2.0 (the "License");
|
||||
%% you may not use this file except in compliance with the License.
|
||||
%% You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing, software
|
||||
%% distributed under the License is distributed on an "AS IS" BASIS,
|
||||
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
%% See the License for the specific language governing permissions and
|
||||
%% limitations under the License.
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
-module(emqtt_sock).
|
||||
|
||||
-export([connect/4, send/2, recv/2, close/1 ]).
|
||||
|
||||
-export([ sockname/1, setopts/2, getstat/2 ]).
|
||||
|
||||
-record(ssl_socket, {
|
||||
tcp,
|
||||
ssl
|
||||
}).
|
||||
|
||||
-type(socket() :: inet:socket() | #ssl_socket{}).
|
||||
|
||||
-type(sockname() :: {inet:ip_address(), inet:port_number()}).
|
||||
|
||||
-type(option() :: gen_tcp:connect_option() | {ssl_opts, [ssl:ssl_option()]}).
|
||||
|
||||
-export_type([socket/0, option/0]).
|
||||
|
||||
-define(DEFAULT_TCP_OPTIONS, [binary, {packet, raw}, {active, false},
|
||||
{nodelay, true}]).
|
||||
|
||||
-spec(connect(inet:ip_address() | inet:hostname(),
              inet:port_number(), [option()], timeout())
      -> {ok, socket()} | {error, term()}).
%% Open a TCP connection to Host:Port. When `ssl_opts' is present in
%% SockOpts, the fresh TCP socket is upgraded to TLS before returning.
connect(Host, Port, SockOpts, Timeout) ->
    TcpOnly = lists:keydelete(ssl_opts, 1, SockOpts),
    TcpOpts = merge_opts(?DEFAULT_TCP_OPTIONS, TcpOnly),
    case gen_tcp:connect(Host, Port, TcpOpts, Timeout) of
        {error, _} = Error ->
            Error;
        {ok, Sock} ->
            case lists:keyfind(ssl_opts, 1, SockOpts) of
                {ssl_opts, SslOpts} -> ssl_upgrade(Sock, SslOpts, Timeout);
                false -> {ok, Sock}
            end
    end.
|
||||
|
||||
%% Upgrade an established TCP socket to TLS. When no ciphers are given,
%% default to every suite OTP supports for the requested TLS versions.
ssl_upgrade(Sock, SslOpts, Timeout) ->
    Versions = proplists:get_value(versions, SslOpts, []),
    Ciphers = proplists:get_value(ciphers, SslOpts, default_ciphers(Versions)),
    case ssl:connect(Sock, merge_opts(SslOpts, [{ciphers, Ciphers}]), Timeout) of
        {ok, SslSock} ->
            %% Ensure this process receives the TLS socket's messages.
            ok = ssl:controlling_process(SslSock, self()),
            {ok, #ssl_socket{tcp = Sock, ssl = SslSock}};
        {error, _} = Error ->
            Error
    end.
|
||||
|
||||
-spec(send(socket(), iodata()) -> ok | {error, einval | closed}).
%% Send Data over a plain TCP port, or through the TLS layer for an
%% upgraded socket.
send(Sock, Data) when is_port(Sock) ->
    gen_tcp:send(Sock, Data);
send(#ssl_socket{ssl = SslSock}, Data) ->
    ssl:send(SslSock, Data).
|
||||
|
||||
-spec(recv(socket(), non_neg_integer())
      -> {ok, iodata()} | {error, closed | inet:posix()}).
%% Receive Length bytes (0 = whatever is available) from a plain TCP port
%% or a TLS socket.
recv(Sock, Length) when is_port(Sock) ->
    gen_tcp:recv(Sock, Length);
recv(#ssl_socket{ssl = SslSock}, Length) ->
    ssl:recv(SslSock, Length).
|
||||
|
||||
-spec(close(socket()) -> ok).
%% Close the socket; for TLS sockets ssl:close/1 also closes the
%% underlying transport.
close(Sock) when is_port(Sock) ->
    gen_tcp:close(Sock);
close(#ssl_socket{ssl = SslSock}) ->
    ssl:close(SslSock).
|
||||
|
||||
-spec(setopts(socket(), [gen_tcp:option() | ssl:socketoption()]) -> ok).
%% Set socket options on the appropriate layer (inet for TCP, ssl for TLS).
setopts(Sock, Opts) when is_port(Sock) ->
    inet:setopts(Sock, Opts);
setopts(#ssl_socket{ssl = SslSock}, Opts) ->
    ssl:setopts(SslSock, Opts).
|
||||
|
||||
-spec(getstat(socket(), [atom()])
      -> {ok, [{atom(), integer()}]} | {error, term()}).
%% Socket statistics. For TLS sockets the counters are read from the
%% underlying TCP port (the tcp field), where the transport stats live.
getstat(Sock, Options) when is_port(Sock) ->
    inet:getstat(Sock, Options);
getstat(#ssl_socket{tcp = Sock}, Options) ->
    inet:getstat(Sock, Options).
|
||||
|
||||
-spec(sockname(socket()) -> {ok, sockname()} | {error, term()}).
%% Local address/port of the socket (via ssl for TLS sockets).
sockname(Sock) when is_port(Sock) ->
    inet:sockname(Sock);
sockname(#ssl_socket{ssl = SslSock}) ->
    ssl:sockname(SslSock).
|
||||
|
||||
-spec(merge_opts(list(), list()) -> list()).
%% Merge Options into Defaults: {Key, Val} tuples replace an existing entry
%% with the same key; bare atom flags are added (deduplicated via usort).
%% NOTE(review): the usort branch re-sorts the whole accumulated list, so
%% adding an atom flag reorders tuple options too — confirm no caller
%% depends on option ordering.
merge_opts(Defaults, Options) ->
    lists:foldl(
        fun({Opt, Val}, Acc) ->
                lists:keystore(Opt, 1, Acc, {Opt, Val});
           (Opt, Acc) ->
                lists:usort([Opt | Acc])
        end, Defaults, Options).
|
||||
|
||||
%% Concatenate, in version order, every cipher suite OTP supports for each
%% requested TLS version; [] when no versions are given.
default_ciphers(TlsVersions) ->
    lists:append([ssl:cipher_suites(all, TlsVer) || TlsVer <- TlsVersions]).
|
||||
170
apps/iot/src/endpoint/iot_http_endpoint.erl
Normal file
170
apps/iot/src/endpoint/iot_http_endpoint.erl
Normal file
@ -0,0 +1,170 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 06. 7月 2023 12:02
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_http_endpoint).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_statem).
|
||||
|
||||
%% API
|
||||
-export([start_link/2]).
|
||||
-export([get_pid/1, forward/4, get_stat/0]).
|
||||
|
||||
%% gen_statem callbacks
|
||||
-export([init/1, handle_event/4, terminate/3, code_change/4, callback_mode/0]).
|
||||
|
||||
%% 消息重发间隔
|
||||
-define(RETRY_INTERVAL, 5000).
|
||||
|
||||
-record(state, {
|
||||
postman_pid :: undefined | pid(),
|
||||
pool_size = 0,
|
||||
flight_num = 0,
|
||||
id = 1,
|
||||
queue :: queue:queue(),
|
||||
%% 定时器对应关系
|
||||
timer_map = #{},
|
||||
%% 记录成功处理的消息数
|
||||
acc_num = 0
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
-spec get_pid(Name :: atom()) -> undefined | pid().
%% Resolve an endpoint by the name it was registered under in start_link/2.
get_pid(Name) when is_atom(Name) ->
    erlang:whereis(Name).
|
||||
|
||||
-spec forward(Pid :: pid(), LocationCode :: binary(), EventType :: integer(), Params :: map()) -> no_return().
%% Asynchronously hand an event over to the endpoint state machine.
forward(Pid, LocationCode, EventType, Params)
  when is_pid(Pid), is_binary(LocationCode), is_integer(EventType), is_map(Params) ->
    gen_statem:cast(Pid, {forward, LocationCode, EventType, Params}).
|
||||
|
||||
-spec get_stat() -> {ok, Stat :: #{}}.
%% Synchronously fetch processing statistics (5s timeout).
%% NOTE(review): this calls the process registered as ?MODULE, but
%% start_link/2 registers under a caller-supplied Name — confirm the two
%% always match, otherwise this call exits with noproc.
get_stat() ->
    gen_statem:call(?MODULE, get_stat, 5000).
|
||||
|
||||
%% @doc Creates a gen_statem process which calls Module:init/1 to
%% initialize. To ensure a synchronized start-up procedure, this
%% function does not return until Module:init/1 has returned.
%% The process is registered locally under the caller-supplied Name.
start_link(Name, Opts) when is_atom(Name), is_list(Opts) ->
    gen_statem:start_link({local, Name}, ?MODULE, [Opts], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_statem callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
|
||||
%% @doc Whenever a gen_statem is started using gen_statem:start/[3,4] or
|
||||
%% gen_statem:start_link/[3,4], this function is called by the new
|
||||
%% process to initialize.
|
||||
%% Start the HTTP postman pool and enter the `connected' state with an
%% empty in-memory queue.
init([Opts]) ->
    PoolSize = proplists:get_value(pool_size, Opts),
    %% NOTE(review): crashes at startup when pool_size is missing from
    %% Opts — confirm the supervisor always provides it.
    {ok, PostmanPid} = broker_postman:start_link(http_postman, Opts, PoolSize),

    {ok, connected, #state{postman_pid = PostmanPid, pool_size = PoolSize, queue = queue:new()}}.
|
||||
|
||||
%% @private
|
||||
%% @doc This function is called by a gen_statem when it needs to find out
|
||||
%% the callback mode of the callback module.
|
||||
%% Single handle_event/4 callback handles all states.
callback_mode() ->
    handle_event_function.
|
||||
|
||||
%% @private
|
||||
%% @doc There should be one instance of this function for each possible
|
||||
%% state name. If callback_mode is state_functions, one of these
|
||||
%% functions is called when gen_statem receives and event from
|
||||
%% call/2, cast/2, or as a normal process message.
|
||||
|
||||
%% Queue an inbound event; schedule an immediate fetch only while the
%% postman pool has spare capacity (avoids useless internal messages).
handle_event(cast, {forward, LocationCode, EventType, Params}, _, State = #state{id = Id, flight_num = FlightNum, pool_size = PoolSize, queue = Q}) ->
    EventData = #event_data{id = Id, location_code = LocationCode, event_type = EventType, params = Params},
    Actions = case FlightNum < PoolSize of
                  true -> [{next_event, info, fetch_next}];
                  false -> []
              end,
    {keep_state, State#state{queue = queue:in(EventData, Q), id = Id + 1}, Actions};

%% Pull the next queued event, post it, and arm a per-id retry timer.
handle_event(info, fetch_next, _, State = #state{postman_pid = PostmanPid, queue = Q, flight_num = FlightNum, timer_map = TimerMap}) ->
    case queue:out(Q) of
        {{value, EventData = #event_data{id = Id}}, Q1} ->
            lager:debug("[iot_http_endpoint] fetch_next success, event data is: ~p", [EventData]),
            do_post(PostmanPid, EventData),
            TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, EventData}),

            {keep_state, State#state{timer_map = maps:put(Id, TimerRef, TimerMap), queue = Q1, flight_num = FlightNum + 1}};
        {empty, Q1} ->
            {keep_state, State#state{queue = Q1}}
    end;

%% Ack from the postman: cancel the retry timer and fetch the next event.
handle_event(info, {ack, Id}, _, State = #state{timer_map = TimerMap, acc_num = AccNum, flight_num = FlightNum}) ->
    %% Fixed log tag: was "[iot_zd_endpoint]" — copy/paste from a sibling module.
    lager:debug("[iot_http_endpoint] get ack: ~p", [Id]),
    case maps:take(Id, TimerMap) of
        error ->
            {keep_state, State#state{acc_num = AccNum + 1, flight_num = FlightNum - 1}, [{next_event, info, fetch_next}]};
        {TimerRef, NTimerMap} ->
            is_reference(TimerRef) andalso erlang:cancel_timer(TimerRef),
            {keep_state, State#state{timer_map = NTimerMap, acc_num = AccNum + 1, flight_num = FlightNum - 1}, [{next_event, info, fetch_next}]}
    end;

%% Retry timer fired without an ack: repost and re-arm the timer.
handle_event(info, {timeout, _, {repost_ticker, EventData = #event_data{id = Id}}}, _, State = #state{postman_pid = PostmanPid, timer_map = TimerMap}) ->
    %% Fixed log tag: was "[iot_zd_endpoint]".
    lager:debug("[iot_http_endpoint] repost data: ~p", [EventData]),
    do_post(PostmanPid, EventData),
    TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, EventData}),

    {keep_state, State#state{timer_map = maps:put(Id, TimerRef, TimerMap)}};

%% Report processing statistics.
handle_event({call, From}, get_stat, StateName, State = #state{acc_num = AccNum}) ->
    %% NOTE(review): queue_num reports mnesia_queue:table_size(), not the
    %% in-memory queue length — confirm that is intended.
    Stat = #{
        <<"acc_num">> => AccNum,
        <<"queue_num">> => mnesia_queue:table_size(),
        <<"state_name">> => atom_to_binary(StateName)
    },
    {keep_state, State, [{reply, From, Stat}]};

%% Catch-all: log and ignore anything unexpected.
handle_event(EventType, Event, StateName, State) ->
    %% Fixed log tag: was "[iot_zd_endpoint]".
    lager:warning("[iot_http_endpoint] unknown message, event_type: ~p, event: ~p, state_name: ~p, state: ~p", [EventType, Event, StateName, State]),
    {keep_state, State}.
|
||||
|
||||
%% @private
|
||||
%% @doc This function is called by a gen_statem when it is about to
|
||||
%% terminate. It should be the opposite of Module:init/1 and do any
|
||||
%% necessary cleaning up. When it returns, the gen_statem terminates with
|
||||
%% Reason. The return value is ignored.
|
||||
%% Log termination; no resources to release beyond normal process cleanup.
terminate(Reason, _StateName, #state{}) ->
    %% Fixed log tag: was "[iot_zd_endpoint]" — copy/paste from a sibling module.
    lager:debug("[iot_http_endpoint] terminate with reason: ~p", [Reason]),
    ok.
|
||||
|
||||
%% @private
|
||||
%% @doc Convert process state when code is changed
|
||||
%% Hot-upgrade hook; state layout is carried over unchanged.
code_change(_OldVsn, StateName, State = #state{}, _Extra) ->
    {ok, StateName, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
-spec do_post(PostmanPid :: pid(), EventData :: #event_data{}) -> no_return().
%% Encode the event as JSON and hand it to the postman pool; the postman is
%% expected to reply with {ack, Id} once the HTTP delivery succeeds.
do_post(PostmanPid, #event_data{id = Id, location_code = LocationCode, event_type = EventType, params = Params}) when is_pid(PostmanPid) ->
    Data = #{
        <<"version">> => <<"1.0">>,
        <<"event_type">> => EventType,
        <<"params">> => Params
    },
    %% force_utf8 lets jiffy encode non-ASCII payloads into one binary.
    Body = iolist_to_binary(jiffy:encode(Data, [force_utf8])),
    %% Id and location code travel in the #post_data{} envelope, not in the
    %% JSON body itself.
    PostmanPid ! {post, self(), #post_data{id = Id, location_code = LocationCode, body = Body}},
    ok.
|
||||
273
apps/iot/src/endpoint/iot_jinzhi_endpoint.erl
Normal file
273
apps/iot/src/endpoint/iot_jinzhi_endpoint.erl
Normal file
@ -0,0 +1,273 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 06. 7月 2023 12:02
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_jinzhi_endpoint).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_statem).
|
||||
|
||||
%% API
|
||||
-export([start_link/0]).
|
||||
-export([get_pid/0, forward/3, get_stat/0]).
|
||||
|
||||
%% gen_statem callbacks
|
||||
-export([init/1, handle_event/4, terminate/3, code_change/4, callback_mode/0]).
|
||||
|
||||
%% 消息重发间隔
|
||||
-define(RETRY_INTERVAL, 5000).
|
||||
%% 系统id
|
||||
-define(SYS_ID, <<"ZNWLZJJKXT">>).
|
||||
|
||||
-record(state, {
|
||||
url :: string(),
|
||||
logger_pid :: pid(),
|
||||
pool_size = 0,
|
||||
flight_num = 0,
|
||||
pri_key :: public_key:private_key(),
|
||||
id = 1,
|
||||
queue :: queue:queue(),
|
||||
%% 定时器对应关系
|
||||
timer_map = #{},
|
||||
%% 记录成功处理的消息数
|
||||
acc_num = 0
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
-spec get_pid() -> undefined | pid().
%% Pid of the locally registered jinzhi endpoint, if running.
get_pid() ->
    whereis(?MODULE).
|
||||
|
||||
-spec forward(LocationCode :: binary(), EventType :: integer(), Params :: map()) -> no_return().
%% Asynchronously hand an AI event to the endpoint state machine.
forward(LocationCode, EventType, Params) when is_binary(LocationCode), is_integer(EventType), is_map(Params) ->
    gen_statem:cast(?MODULE, {forward, LocationCode, EventType, Params}).
|
||||
|
||||
-spec get_stat() -> {ok, Stat :: #{}}.
%% Synchronously fetch processing statistics (5s timeout).
%% NOTE(review): the spec says {ok, Stat}, but the get_stat clause of
%% handle_event/4 replies with the bare stats map — align spec or reply.
get_stat() ->
    gen_statem:call(?MODULE, get_stat, 5000).
|
||||
|
||||
%% @doc Creates a gen_statem process which calls Module:init/1 to
%% initialize. To ensure a synchronized start-up procedure, this
%% function does not return until Module:init/1 has returned.
%% Registered locally under ?MODULE (a singleton).
start_link() ->
    gen_statem:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_statem callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
|
||||
%% @doc Whenever a gen_statem is started using gen_statem:start/[3,4] or
|
||||
%% gen_statem:start_link/[3,4], this function is called by the new
|
||||
%% process to initialize.
|
||||
%% Read the `jinzhi' app env, start the audit logger and load the RSA
%% private key used for request signing, then enter the `connected' state.
init([]) ->
    {ok, Opts} = application:get_env(iot, jinzhi),

    PoolSize = proplists:get_value(pool_size, Opts),
    PriFile = proplists:get_value(pri_key, Opts),
    Url = proplists:get_value(url, Opts),

    {ok, LoggerPid} = iot_logger:start_link("ai_event_data"),
    %% Fails fast (crashing init) when the key file is missing or invalid.
    PriKey = generate_private_key(PriFile),

    {ok, connected, #state{url = Url, logger_pid = LoggerPid, pri_key = PriKey, pool_size = PoolSize, queue = queue:new()}}.
|
||||
|
||||
%% @private
|
||||
%% @doc This function is called by a gen_statem when it needs to find out
|
||||
%% the callback mode of the callback module.
|
||||
%% Single handle_event/4 callback handles all states.
callback_mode() ->
    handle_event_function.
|
||||
|
||||
%% @private
|
||||
%% @doc There should be one instance of this function for each possible
|
||||
%% state name. If callback_mode is state_functions, one of these
|
||||
%% functions is called when gen_statem receives and event from
|
||||
%% call/2, cast/2, or as a normal process message.
|
||||
|
||||
%% Queue an inbound event; schedule an immediate fetch only while there is
%% spare capacity (avoids useless internal messages).
handle_event(cast, {forward, LocationCode, EventType, Params}, _, State = #state{id = Id, flight_num = FlightNum, pool_size = PoolSize, queue = Q}) ->
    EventData = #event_data{id = Id, location_code = LocationCode, event_type = EventType, params = Params},
    Actions = case FlightNum < PoolSize of
                  true -> [{next_event, info, fetch_next}];
                  false -> []
              end,
    {keep_state, State#state{queue = queue:in(EventData, Q), id = Id + 1}, Actions};

%% Pull the next queued event, post it, and arm a per-id retry timer.
handle_event(info, fetch_next, _, State = #state{queue = Q, flight_num = FlightNum, timer_map = TimerMap}) ->
    case queue:out(Q) of
        {{value, EventData = #event_data{id = Id}}, Q1} ->
            %% Fixed log tag: was "[iot_http_endpoint]" — copy/paste from a
            %% sibling module.
            lager:debug("[iot_jinzhi_endpoint] fetch_next success, event data is: ~p", [EventData]),
            %% do_post may crash on malformed params; the retry timer below
            %% is armed regardless, so the event is retried rather than lost.
            catch do_post(EventData, State),
            TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, EventData}),

            {keep_state, State#state{timer_map = maps:put(Id, TimerRef, TimerMap), queue = Q1, flight_num = FlightNum + 1}};
        {empty, Q1} ->
            {keep_state, State#state{queue = Q1}}
    end;

%% Ack from the async poster: cancel the retry timer and fetch the next event.
handle_event(info, {ack, Id}, _, State = #state{timer_map = TimerMap, acc_num = AccNum, flight_num = FlightNum}) ->
    case maps:take(Id, TimerMap) of
        error ->
            {keep_state, State#state{acc_num = AccNum + 1, flight_num = FlightNum - 1}, [{next_event, info, fetch_next}]};
        {TimerRef, NTimerMap} ->
            is_reference(TimerRef) andalso erlang:cancel_timer(TimerRef),
            {keep_state, State#state{timer_map = NTimerMap, acc_num = AccNum + 1, flight_num = FlightNum - 1}, [{next_event, info, fetch_next}]}
    end;

%% Retry timer fired without an ack: repost and re-arm the timer.
handle_event(info, {timeout, _, {repost_ticker, EventData = #event_data{id = Id}}}, _, State = #state{timer_map = TimerMap}) ->
    lager:debug("[iot_jinzhi_endpoint] repost data: ~p", [EventData]),
    catch do_post(EventData, State),
    TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, EventData}),

    {keep_state, State#state{timer_map = maps:put(Id, TimerRef, TimerMap)}};

%% A poster task finished normally — nothing to do.
handle_event(info, {'DOWN', _MRef, process, _Pid, normal}, _, State) ->
    {keep_state, State};

handle_event(info, {'DOWN', _MRef, process, _Pid, Reason}, _, State) ->
    lager:notice("[iot_jinzhi_endpoint] task process down with reason: ~p", [Reason]),
    {keep_state, State};

%% Report processing statistics.
handle_event({call, From}, get_stat, StateName, State = #state{acc_num = AccNum}) ->
    %% NOTE(review): queue_num reports mnesia_queue:table_size(), not the
    %% in-memory queue length — confirm that is intended.
    Stat = #{
        <<"acc_num">> => AccNum,
        <<"queue_num">> => mnesia_queue:table_size(),
        <<"state_name">> => atom_to_binary(StateName)
    },
    {keep_state, State, [{reply, From, Stat}]};

%% Catch-all: log and ignore anything unexpected.
handle_event(EventType, Event, StateName, State) ->
    lager:warning("[iot_jinzhi_endpoint] unknown message, event_type: ~p, event: ~p, state_name: ~p, state: ~p", [EventType, Event, StateName, State]),
    {keep_state, State}.
|
||||
|
||||
%% @private
|
||||
%% @doc This function is called by a gen_statem when it is about to
|
||||
%% terminate. It should be the opposite of Module:init/1 and do any
|
||||
%% necessary cleaning up. When it returns, the gen_statem terminates with
|
||||
%% Reason. The return value is ignored.
|
||||
%% Log termination; no resources to release beyond normal process cleanup.
terminate(Reason, _StateName, #state{}) ->
    lager:debug("[iot_jinzhi_endpoint] terminate with reason: ~p", [Reason]),
    ok.
|
||||
|
||||
%% @private
|
||||
%% @doc Convert process state when code is changed
|
||||
%% Hot-upgrade hook; state layout is carried over unchanged.
code_change(_OldVsn, StateName, State = #state{}, _Extra) ->
    {ok, StateName, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
-spec do_post(EventData :: #event_data{}, State :: #state{}) -> no_return().
%% Build the partner payload, sign it, and POST it from a monitored helper
%% process. Only an HTTP 200 produces an {ack, Id} back to the state
%% machine; every other outcome is just logged and left to the repost
%% timer, so delivery is at-least-once.
%% Crashes (caught by callers via `catch') when Params lacks any of the
%% required keys matched below.
do_post(#event_data{id = Id, location_code = LocationCode, event_type = EventType,
    params = Params = #{<<"event_code">> := EventCode, <<"description">> := Description, <<"datetime">> := Datetime, <<"attachments">> := Attachments0}},
    #state{pri_key = PriKey, url = Url, logger_pid = LoggerPid}) ->

    % <<"occurrenceTime">> => <<"2023-06-10 12:00:00">>,

    %% Resolve each attachment filename to a downloadable URL.
    Attachments = lists:map(fun(#{<<"filename">> := Filename}) ->
        {ok, FileUrl} = iot_util:file_uri(Filename),
        Name = filename:basename(FileUrl),
        #{<<"name">> => Name, <<"url">> => FileUrl}
    end, Attachments0),

    DeviceInfo = #{
        <<"location">> => LocationCode,
        <<"category">> => EventCode,
        <<"description">> => Description,
        <<"occurrenceTime">> => Datetime,
        <<"attachments">> => Attachments
    },

    %% The signature covers DeviceInfo only (see sign/2 and serialize/1).
    ReqData = #{
        <<"sign">> => sign(DeviceInfo, PriKey),
        <<"sysId">> => ?SYS_ID,
        <<"deviceInfo">> => DeviceInfo
    },
    Body = iolist_to_binary(jiffy:encode(ReqData, [force_utf8])),

    lager:debug("[iot_jinzhi_endpoint] do_post url: ~p, event_type: ~p, params: ~p, location_code: ~p, body: ~p", [Url, EventType, Params, LocationCode, Body]),

    ReceiverPid = self(),
    %% Post asynchronously so the state machine is never blocked on HTTP;
    %% the monitor lets abnormal task exits surface via 'DOWN' messages.
    spawn_monitor(fun() ->
        Headers = [
            {<<"content-type">>, <<"application/json">>}
        ],
        case hackney:request(post, Url, Headers, Body, [{pool, false}]) of
            {ok, 200, _, ClientRef} ->
                {ok, RespBody} = hackney:body(ClientRef),
                hackney:close(ClientRef),
                %% Persist the request/response pair for auditing.
                iot_logger:write(LoggerPid, [Body, RespBody]),
                ReceiverPid ! {ack, Id};
            {ok, HttpCode, _, ClientRef} ->
                {ok, RespBody} = hackney:body(ClientRef),
                hackney:close(ClientRef),
                lager:warning("[iot_jinzhi_endpoint] send body: ~p, get error is: ~p", [Body, {HttpCode, RespBody}]);
            {error, Reason} ->
                lager:warning("[iot_jinzhi_endpoint] send body: ~p, get error is: ~p", [Body, Reason])
        end
    end).
|
||||
|
||||
-spec generate_private_key(PriFile :: string()) -> public_key:private_key().
%% Load and DER-decode the signing key stored under the application's priv
%% dir. The file is expected to hold raw base64 (no PEM armor, no
%% surrounding whitespace).
generate_private_key(PriFile) when is_list(PriFile) ->
    Path = code:priv_dir(iot) ++ "/" ++ PriFile,
    {ok, Encoded} = file:read_file(Path),
    public_key:der_decode('PrivateKeyInfo', base64:decode(Encoded)).
|
||||
|
||||
%% 数据签名
|
||||
-spec sign(M :: #{}, PrivateKey :: public_key:private_key()) -> binary().
%% Sign the payload map: canonical-serialize it, SHA-256 it, hex-encode the
%% digest, RSA-"encrypt" with the private key, then base64 the result.
sign(M, PrivateKey) when is_map(M) ->
    Json = serialize(M),
    %% Lower-case, zero-padded 64-character hex of the SHA-256 digest.
    Hash = iolist_to_binary(io_lib:format("~64.16.0b", [binary:decode_unsigned(crypto:hash(sha256, Json))])),
    %% NOTE(review): encrypt_private-over-digest is a legacy RSA signing
    %% scheme; the remote verifier must do the matching public-key decrypt.
    %% Do not switch to public_key:sign/3 without coordinating with them.
    RsaEncoded = public_key:encrypt_private(Hash, PrivateKey),

    base64:encode(RsaEncoded).
|
||||
|
||||
%% Canonical serialization used as the signing payload: keys sorted
%% ascending, compact JSON-like output with no whitespace.
%% NOTE: string values are emitted without any escaping, so this is not a
%% general-purpose JSON encoder — it must stay in lock-step with the
%% remote verifier.
-spec serialize(M :: map()) -> JsonString :: binary().
serialize(Map) when is_map(Map) ->
    serialize(lists:keysort(1, maps:to_list(Map)), []).

serialize([], Acc) ->
    Fields = iolist_to_binary(lists:join(<<$,>>, lists:reverse(Acc))),
    <<${, Fields/binary, $}>>;
serialize([{Key, Value} | Rest], Acc) ->
    Encoded =
        if
            is_integer(Value) ->
                integer_to_binary(Value);
            is_float(Value) ->
                float_to_binary(Value);
            is_binary(Value) ->
                <<$", Value/binary, $">>;
            Value =:= true ->
                <<"true">>;
            Value =:= false ->
                <<"false">>;
            is_list(Value) ->
                %% Lists may only hold nested maps, serialized recursively.
                Items = [serialize(Elem) || Elem <- Value],
                Inner = iolist_to_binary(lists:join(<<$,>>, Items)),
                <<$[, Inner/binary, $]>>
        end,
    serialize(Rest, [<<$", Key/binary, $", $:, Encoded/binary>> | Acc]).
|
||||
243
apps/iot/src/endpoint/iot_zd_endpoint.erl
Normal file
243
apps/iot/src/endpoint/iot_zd_endpoint.erl
Normal file
@ -0,0 +1,243 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 06. 7月 2023 12:02
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_zd_endpoint).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_statem).
|
||||
|
||||
%% API
|
||||
-export([start_link/0]).
|
||||
-export([get_pid/0, forward/3, get_stat/0]).
|
||||
|
||||
%% gen_statem callbacks
|
||||
-export([init/1, handle_event/4, terminate/3, code_change/4, callback_mode/0]).
|
||||
|
||||
%% 消息重发间隔
|
||||
-define(RETRY_INTERVAL, 5000).
|
||||
|
||||
-record(state, {
|
||||
mqtt_opts = [],
|
||||
postman_pid :: undefined | pid(),
|
||||
logger_pid :: pid(),
|
||||
|
||||
%% 当前数据的游标, #north_data的id
|
||||
cursor = 0 :: integer(),
|
||||
%% 定时器
|
||||
timer_ref :: undefined | reference(),
|
||||
%% 是否繁忙
|
||||
is_busy = false :: boolean(),
|
||||
|
||||
%% 记录成功处理的消息数
|
||||
acc_num = 0
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc Pid of the locally registered endpoint process, or 'undefined'
%% when it is not running.
-spec get_pid() -> undefined | pid().
get_pid() ->
    erlang:whereis(?MODULE).
|
||||
|
||||
%% @doc Queue one north-bound data sample for forwarding.
%% Fields is normally a list of property maps; a pre-encoded binary is
%% accepted as well (the original guard admitted both shapes).
%% BUG FIX: the original guard chained its tests with ';', splitting them
%% into alternative groups — (binary LC and list Fields) OR (binary Fields
%% and integer Ts) — so Timestamp was never validated when Fields was a
%% list. All conditions are now enforced together.
%% The cast is fire-and-forget and returns ok even if the endpoint is down.
-spec forward(LocationCode :: binary(), Fields :: list() | binary(), Timestamp :: integer()) -> ok.
forward(LocationCode, Fields, Timestamp)
    when is_binary(LocationCode),
         (is_list(Fields) orelse is_binary(Fields)),
         is_integer(Timestamp) ->
    gen_statem:cast(?MODULE, {forward, LocationCode, Fields, Timestamp}).
|
||||
|
||||
%% @doc Fetch runtime counters (acc_num / queue_num / state_name) from the
%% endpoint process. Blocks up to 5 seconds; exits (timeout/noproc) when
%% the process is unavailable.
%% SPEC FIX: the get_stat clause of handle_event/4 replies with the stats
%% map itself ({reply, From, Stat}), so this returns the bare map — the
%% old spec promised {ok, Map}, which no caller could ever see.
-spec get_stat() -> map().
get_stat() ->
    gen_statem:call(?MODULE, get_stat, 5000).
|
||||
|
||||
%% @doc Creates a gen_statem process which calls Module:init/1 to
%% initialize. To ensure a synchronized start-up procedure, this
%% function does not return until Module:init/1 has returned.
%% The process is registered locally under the module name, so only one
%% instance can run per node.
start_link() ->
    gen_statem:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_statem callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc gen_statem init callback. Reads the 'zhongdian' MQTT options,
%% traps exits (so a crashing postman arrives as an 'EXIT' message),
%% schedules deferred postman creation, and starts the data logger.
%% Starts in the 'disconnected' state with no postman attached.
init([]) ->
    {ok, MqttOpts} = application:get_env(iot, zhongdian),

    erlang:process_flag(trap_exit, true),
    %% Delayed initialisation: creating the MQTT postman could block, so it
    %% is triggered from a zero-delay timer message instead of inline here.
    erlang:start_timer(0, self(), create_postman),
    %% Start the north-bound data logger.
    {ok, LoggerPid} = iot_logger:start_link("north_data"),

    {ok, disconnected, #state{mqtt_opts = MqttOpts, postman_pid = undefined, logger_pid = LoggerPid}}.
|
||||
|
||||
%% @private
%% @doc This function is called by a gen_statem when it needs to find out
%% the callback mode of the callback module. All events are dispatched to
%% handle_event/4; state names are plain atoms (connected | disconnected).
callback_mode() ->
    handle_event_function.
|
||||
|
||||
%% @private
%% @doc Single handler for every gen_statem event (callback_mode is
%% handle_event_function). States are 'connected' / 'disconnected';
%% #state.is_busy marks an in-flight message that is awaiting its ack.
%% Clause order matters: more specific heads must stay before the
%% catch-all at the bottom.

%% A new sample arrives: persist it to the mnesia-backed queue first, then
%% poke the send loop — but only when connected and idle.
handle_event(cast, {forward, LocationCode, Fields, Timestamp}, StateName, State = #state{is_busy = IsBusy}) ->
    mnesia_queue:insert(#north_data{location_code = LocationCode, fields = Fields, timestamp = Timestamp}),
    %% Avoid unnecessary internal messages.
    Actions = case StateName =:= connected andalso not IsBusy of
                  true -> [{next_event, info, fetch_next}];
                  false -> []
              end,
    {keep_state, State, Actions};

%% Trigger reading the next queued record.
handle_event(info, fetch_next, disconnected, State) ->
    lager:debug("[iot_zd_endpoint] fetch_next postman offline, data in queue"),
    {keep_state, State};
handle_event(info, fetch_next, connected, State = #state{is_busy = true}) ->
    {keep_state, State};
handle_event(info, fetch_next, connected, State = #state{postman_pid = PostmanPid, cursor = Cursor}) ->
    case mnesia_queue:dirty_fetch_next(Cursor) of
        {ok, NCursor, NorthData} ->
            lager:debug("[iot_zd_endpoint] fetch_next success, north data is: ~p", [NorthData]),
            do_post(PostmanPid, NorthData),
            %% Arm the retry timer; it is cancelled when the ack arrives.
            TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, NorthData}),

            {keep_state, State#state{cursor = NCursor, timer_ref = TimerRef, is_busy = true}};
        '$end_of_table' ->
            {keep_state, State}
    end;

%% Ack received for the in-flight message: log it, drop it from the
%% queue, cancel the retry timer and fetch the next record when connected.
handle_event(info, {ack, Id, AssocMessage}, StateName, State = #state{timer_ref = TimerRef, acc_num = AccNum, logger_pid = LoggerPid}) ->
    %% Record the acknowledged message to the data log.
    iot_logger:write(LoggerPid, AssocMessage),

    ok = mnesia_queue:delete(Id),
    lager:debug("[iot_zd_endpoint] get ack: ~p", [Id]),
    Actions = case StateName =:= connected of
                  true -> [{next_event, info, fetch_next}];
                  false -> []
              end,
    is_reference(TimerRef) andalso erlang:cancel_timer(TimerRef),

    {keep_state, State#state{timer_ref = undefined, acc_num = AccNum + 1, is_busy = false}, Actions};

%% Retry timer fired before the ack arrived: repost the record and re-arm.
handle_event(info, {timeout, _, {repost_ticker, NorthData}}, connected, State = #state{postman_pid = PostmanPid}) ->
    lager:debug("[iot_zd_endpoint] repost data: ~p", [NorthData]),
    do_post(PostmanPid, NorthData),
    TimerRef = erlang:start_timer(?RETRY_INTERVAL, self(), {repost_ticker, NorthData}),

    {keep_state, State#state{timer_ref = TimerRef}};

%% Offline: ignore stale retry timers.
handle_event(info, {timeout, _, {repost_ticker, _}}, disconnected, State) ->
    {keep_state, State};

%% (Re)create the MQTT postman; on failure stay disconnected and retry
%% after ?RETRY_INTERVAL.
handle_event(info, {timeout, _, create_postman}, disconnected, State = #state{mqtt_opts = Opts}) ->
    lager:debug("[iot_zd_endpoint] create postman"),
    try
        {ok, PostmanPid} = create_postman(Opts),
        {next_state, connected, State#state{postman_pid = PostmanPid, timer_ref = undefined, is_busy = false}, [{next_event, info, fetch_next}]}
    catch _:Error:Stack ->
        lager:warning("[iot_zd_endpoint] config: ~p, create postman get error: ~p, stack: ~p", [Opts, Error, Stack]),
        erlang:start_timer(?RETRY_INTERVAL, self(), create_postman),

        {keep_state, State#state{postman_pid = undefined}}
    end;

%% Report current statistics (replies with the bare stats map).
handle_event({call, From}, get_stat, StateName, State = #state{acc_num = AccNum}) ->
    Stat = #{
        <<"acc_num">> => AccNum,
        <<"queue_num">> => mnesia_queue:table_size(),
        <<"state_name">> => atom_to_binary(StateName)
    },
    {keep_state, State, [{reply, From, Stat}]};

%% The postman died: cancel any retry timer, schedule re-creation and go
%% disconnected (queued data stays in mnesia meanwhile).
handle_event(info, {'EXIT', PostmanPid, Reason}, connected, State = #state{timer_ref = TimerRef, postman_pid = PostmanPid}) ->
    lager:warning("[iot_zd_endpoint] postman exited with reason: ~p", [Reason]),
    is_reference(TimerRef) andalso erlang:cancel_timer(TimerRef),
    erlang:start_timer(?RETRY_INTERVAL, self(), create_postman),

    {next_state, disconnected, State#state{timer_ref = undefined, postman_pid = undefined}};

%% Catch-all: log and ignore anything unexpected.
handle_event(EventType, Event, StateName, State) ->
    lager:warning("[iot_zd_endpoint] unknown message, event_type: ~p, event: ~p, state_name: ~p, state: ~p", [EventType, Event, StateName, State]),
    {keep_state, State}.
|
||||
|
||||
%% @private
%% @doc Called by gen_statem when the process is about to terminate.
%% Performs no cleanup beyond logging; the return value is ignored.
terminate(Reason, _StateName, #state{}) ->
    lager:debug("[iot_zd_endpoint] terminate with reason: ~p", [Reason]),
    ok.
|
||||
|
||||
%% @private
%% @doc Convert process state when code is changed; the state layout is
%% carried over unchanged.
code_change(_OldVsn, StateName, State = #state{}, _Extra) ->
    {ok, StateName, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
%% MQTT uplink support — only a single connection is needed.
%% Connection parameters come from the 'zhongdian' application env; the
%% client id embeds the node name so parallel nodes do not clash.
create_postman(Opts) ->
    [Host, Port, Username, Password, Topic, Qos] =
        [proplists:get_value(Key, Opts) || Key <- [host, port, username, password, topic, qos]],

    ClientId = <<"mqtt-client-", (atom_to_binary(node()))/binary, "-zhongdian_mqtt">>,
    PostmanOpts = [
        {clientid, ClientId},
        {host, Host},
        {port, Port},
        {tcp_opts, []},
        {username, Username},
        {password, Password},
        {keepalive, 86400},
        {auto_ack, true},
        {connect_timeout, 5000},
        {proto_ver, v5},
        {retry_interval, 5000}
    ],

    mqtt_postman:start_link(PostmanOpts, Topic, Qos).
|
||||
|
||||
%% Encode one queued record as JSON and hand it to the postman process.
%% If encoding fails the record is self-acked with a marker message, so
%% the broken record is removed from the queue instead of wedging the
%% send loop.
-spec do_post(PostmanPid :: pid(), NorthData :: #north_data{}) -> no_return().
do_post(PostmanPid, #north_data{id = Id, location_code = LocationCode, fields = Fields, timestamp = Timestamp}) when is_pid(PostmanPid) ->
    Payload = #{
        <<"version">> => <<"1.0">>,
        <<"location_code">> => LocationCode,
        <<"ts">> => Timestamp,
        <<"properties">> => Fields
    },
    try iolist_to_binary(jiffy:encode(Payload, [force_utf8])) of
        Body ->
            PostmanPid ! {post, self(), #post_data{id = Id, location_code = LocationCode, body = Body}}
    catch _:_ ->
        self() ! {ack, Id, <<"json error">>}
    end.
|
||||
68
apps/iot/src/http_handler/device_handler.erl
Normal file
68
apps/iot/src/http_handler/device_handler.erl
Normal file
@ -0,0 +1,68 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2020, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 26. 4月 2020 3:36 下午
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(device_handler).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([handle_request/4]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% HTTP handlers for per-device operations. Errors are still answered
%% with HTTP 200; success/failure is encoded in the JSON body.

%% Reload one device under its host process.
handle_request("POST", "/device/reload", _, #{<<"host_id">> := HostId, <<"device_uuid">> := DeviceUUID}) when is_integer(HostId), is_binary(DeviceUUID) ->
    lager:debug("[device_handler] host_id: ~p, will reload device uuid: ~p", [HostId, DeviceUUID]),
    case global:whereis_name(iot_host:get_alias_name(HostId)) of
        HostPid when is_pid(HostPid) ->
            case iot_host:reload_device(HostPid, DeviceUUID) of
                ok ->
                    {ok, 200, iot_util:json_data(<<"success">>)};
                {error, Reason} ->
                    lager:debug("[device_handler] reload device: ~p, get error: ~p", [DeviceUUID, Reason]),
                    {ok, 200, iot_util:json_error(404, <<"reload device failed">>)}
            end;
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"reload device failed">>)}
    end;

%% Delete one device under its host process.
handle_request("POST", "/device/delete", _, #{<<"host_id">> := HostId, <<"device_uuid">> := DeviceUUID}) when is_integer(HostId), is_binary(DeviceUUID) ->
    case global:whereis_name(iot_host:get_alias_name(HostId)) of
        HostPid when is_pid(HostPid) ->
            ok = iot_host:delete_device(HostPid, DeviceUUID),
            {ok, 200, iot_util:json_data(<<"success">>)};
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"delete device failed">>)}
    end;

%% Toggle a device's authorization/activation flag.
handle_request("POST", "/device/activate", _, #{<<"host_id">> := HostId, <<"device_uuid">> := DeviceUUID, <<"auth">> := Auth})
    when is_integer(HostId), is_binary(DeviceUUID), is_boolean(Auth) ->

    case global:whereis_name(iot_host:get_alias_name(HostId)) of
        HostPid when is_pid(HostPid) ->
            case iot_host:activate_device(HostPid, DeviceUUID, Auth) of
                ok ->
                    {ok, 200, iot_util:json_data(<<"success">>)};
                {error, Reason} ->
                    lager:debug("[device_handler] activate device: ~p, get error: ~p", [DeviceUUID, Reason]),
                    {ok, 200, iot_util:json_error(404, <<"activate device failed">>)}
            end;
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"activate device failed">>)}
    end;

%% Fallback: unknown method/path.
handle_request(_, Path, _, _) ->
    BinPath = list_to_binary(Path),
    {ok, 200, iot_util:json_error(-1, <<"url: ", BinPath/binary, " not found">>)}.
|
||||
152
apps/iot/src/http_handler/host_handler.erl
Normal file
152
apps/iot/src/http_handler/host_handler.erl
Normal file
@ -0,0 +1,152 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2020, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 26. 4月 2020 3:36 下午
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(host_handler).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([handle_request/4]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Query the metric snapshot of one host process. API convention: every
%% outcome is an HTTP 200; error codes live inside the JSON body.
handle_request("GET", "/host/metric", #{<<"uuid">> := UUID}, _) ->
    lager:debug("[host_handler] get host metric uuid is: ~p", [UUID]),
    case iot_host:get_pid(UUID) of
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"host not found">>)};
        Pid when is_pid(Pid) ->
            {ok, MetricInfo} = iot_host:get_metric(Pid),
            case map_size(MetricInfo) > 0 of
                true ->
                    {ok, 200, iot_util:json_data(MetricInfo)};
                false ->
                    {ok, 200, iot_util:json_error(404, <<"no metric info">>)}
            end
    end;

%% Query the status of one host process.
handle_request("GET", "/host/status", #{<<"uuid">> := UUID}, _) when is_binary(UUID) ->
    case iot_host:get_pid(UUID) of
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"host not found">>)};
        Pid when is_pid(Pid) ->
            {ok, StatusInfo} = iot_host:get_status(Pid),
            {ok, 200, iot_util:json_data(StatusInfo)}
    end;

%% Reload one host: (re)start its process and re-apply the persisted
%% authorization status from the database.
handle_request("POST", "/host/reload", _, #{<<"uuid">> := UUID}) when is_binary(UUID) ->
    lager:debug("[host_handler] will reload host uuid: ~p", [UUID]),
    case iot_host_sup:ensured_host_started(UUID) of
        {ok, Pid} when is_pid(Pid) ->
            {ok, #{<<"authorize_status">> := AuthorizeStatus}} = host_bo:get_host_by_uuid(UUID),
            ok = iot_host:activate(Pid, AuthorizeStatus =:= 1),
            lager:debug("[host_handler] already_started reload host uuid: ~p, success", [UUID]),
            {ok, 200, iot_util:json_data(<<"success">>)};
        Error ->
            lager:debug("[host_handler] reload host uuid: ~p, error: ~p", [UUID, Error]),
            {ok, 200, iot_util:json_error(404, <<"reload error">>)}
    end;

%% Delete one host process.
handle_request("POST", "/host/delete", _, #{<<"uuid">> := UUID}) when is_binary(UUID) ->
    case iot_host_sup:delete_host(UUID) of
        ok ->
            lager:debug("[host_handler] will delete host uuid: ~p", [UUID]),
            {ok, 200, iot_util:json_data(<<"success">>)};
        {error, Reason} ->
            lager:debug("[host_handler] delete host uuid: ~p, get error is: ~p", [UUID, Reason]),
            {ok, 200, iot_util:json_error(404, <<"error">>)}
    end;

%% Publish a command to the host over MQTT and wait for its ack; the task
%% log status is updated according to the outcome.
handle_request("POST", "/host/publish_command", _,
    PostParams = #{<<"uuid">> := UUID, <<"command_type">> := CommandType, <<"task_id">> := TaskId, <<"timeout">> := Timeout, <<"params">> := Params})
    when is_binary(UUID), is_integer(TaskId), is_integer(Timeout), is_integer(CommandType) ->

    lager:debug("[http_host_handler] publish_command body is: ~p", [PostParams]),
    case iot_host:get_pid(UUID) of
        undefined ->
            {ok, 200, iot_util:json_error(404, <<"host not found">>)};
        Pid when is_pid(Pid) ->
            %% Command envelope: task id, timeout, timestamp and the
            %% JSON-encoded command parameters.
            Reply = #{
                <<"t_id">> => integer_to_binary(TaskId),
                <<"t">> => Timeout,
                <<"ts">> => iot_util:current_time(),
                <<"m">> => iolist_to_binary(jiffy:encode(Params, [force_utf8]))
            },

            %% Optional routing target from <<"service_name">>.
            Reply1 = append_service_name(PostParams, Reply),
            lager:debug("[http_host_handler] publish message is: ~p", [Reply1]),
            BinReply = iolist_to_binary(jiffy:encode(Reply1, [force_utf8])),

            %% Timeout appears to be seconds, converted to ms here.
            case iot_host:publish_message(Pid, CommandType, {aes, BinReply}, Timeout * 1000) of
                {error, timeout} ->
                    lager:debug("[iot_host_handler] host_id uuid: ~p, publish topic success, but get ack timeout", [UUID]),
                    {ok, _} = task_logs_bo:change_status(TaskId, ?TASK_STATUS_FAILED),
                    {ok, 200, iot_util:json_error(401, <<"命令执行超时, 请重试"/utf8>>)};
                {error, Reason} when is_binary(Reason) ->
                    task_logs_bo:change_status(TaskId, ?TASK_STATUS_FAILED),
                    {ok, 200, iot_util:json_error(400, Reason)};
                ok ->
                    {ok, _} = task_logs_bo:change_status(TaskId, ?TASK_STATUS_OK),
                    {ok, 200, iot_util:json_data(<<"success">>)};
                {ok, Response} ->
                    %% The device acked with a JSON body; code 1 = success.
                    case jiffy:decode(Response, [return_maps]) of
                        #{<<"code">> := 1} ->
                            {ok, _} = task_logs_bo:change_status(TaskId, ?TASK_STATUS_OK),
                            {ok, 200, iot_util:json_data(<<"success">>)};
                        #{<<"code">> := 0, <<"message">> := Message} when is_binary(Message) ->
                            {ok, _} = task_logs_bo:change_status(TaskId, ?TASK_STATUS_FAILED),
                            {ok, 200, iot_util:json_error(401, <<"操作失败: "/utf8, Message/binary>>)}
                    end
            end
    end;

%% Activate the host's authorization.
handle_request("POST", "/host/activate", _, #{<<"uuid">> := UUID, <<"auth">> := true}) when is_binary(UUID) ->
    case iot_host_sup:ensured_host_started(UUID) of
        {error, Reason} ->
            lager:debug("[host_handler] activate host_id: ~p, failed with reason: ~p", [UUID, Reason]),
            {ok, 200, iot_util:json_error(400, <<"host not found">>)};
        {ok, Pid} when is_pid(Pid) ->
            lager:debug("[host_handler] activate host_id: ~p, start", [UUID]),
            ok = iot_host:activate(Pid, true),

            {ok, 200, iot_util:json_data(<<"success">>)}
    end;

%% Deactivate the host's authorization.
handle_request("POST", "/host/activate", _, #{<<"uuid">> := UUID, <<"auth">> := false}) when is_binary(UUID) ->
    case iot_host_sup:ensured_host_started(UUID) of
        {error, Reason} ->
            lager:debug("[host_handler] activate host_id: ~p, failed with reason: ~p", [UUID, Reason]),
            {ok, 200, iot_util:json_error(400, <<"host not found">>)};
        {ok, Pid} when is_pid(Pid) ->
            lager:debug("[host_handler] activate host_id: ~p, start", [UUID]),
            ok = iot_host:activate(Pid, false),

            {ok, 200, iot_util:json_data(<<"success">>)}
    end;

%% Fallback: unknown method/path.
handle_request(_, Path, _, _) ->
    Path1 = list_to_binary(Path),
    {ok, 200, iot_util:json_error(-1, <<"url: ", Path1/binary, " not found">>)}.
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Attach the optional routing target: a non-empty binary
%% <<"service_name">> in the request becomes the <<"to">> field of the
%% reply; otherwise the reply passes through unchanged.
append_service_name(#{<<"service_name">> := Name}, Reply) when is_binary(Name), Name =/= <<>> ->
    maps:put(<<"to">>, Name, Reply);
append_service_name(_, Reply) ->
    Reply.
|
||||
82
apps/iot/src/http_handler/http_protocol.erl
Normal file
82
apps/iot/src/http_handler/http_protocol.erl
Normal file
@ -0,0 +1,82 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2020, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 26. 4月 2020 3:36 下午
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(http_protocol).
|
||||
-author("licheng5").
|
||||
|
||||
%% API
|
||||
-export([init/2]).
|
||||
|
||||
%% Cowboy handler entry point. Opts is [HandlerModule | _]; the request is
%% dispatched to HandlerModule:handle_request(Method, Path, GetParams,
%% PostParams), which returns {ok, StatusCode, JsonResponse}.
%% Responses of 1 KiB or more are gzip-compressed when the client accepts
%% gzip. A thrown term becomes a 404 JSON error; any other exception is
%% logged and answered with a plain 500 page.
init(Req0, Opts = [Mod|_]) ->
    Method = binary_to_list(cowboy_req:method(Req0)),
    Path = binary_to_list(cowboy_req:path(Req0)),
    GetParams0 = cowboy_req:parse_qs(Req0),
    GetParams = maps:from_list(GetParams0),
    {ok, PostParams, Req1} = parse_body(Req0),

    try Mod:handle_request(Method, Path, GetParams, PostParams) of
        {ok, StatusCode, Resp} ->
            lager:debug("[http_protocol] request path: ~p, get_params: ~p, post_params: ~p, response: ~ts",
                [Path, GetParams, PostParams, Resp]),
            AcceptEncoding = cowboy_req:header(<<"accept-encoding">>, Req1, <<>>),
            %% Only compress larger bodies, and only when the client allows it.
            Req2 = case iolist_size(Resp) >= 1024 andalso supported_gzip(AcceptEncoding) of
                       true ->
                           Resp1 = zlib:gzip(Resp),
                           cowboy_req:reply(StatusCode, #{
                               <<"Content-Type">> => <<"application/json;charset=utf-8">>,
                               <<"Content-Encoding">> => <<"gzip">>
                           }, Resp1, Req1);
                       false ->
                           cowboy_req:reply(StatusCode, #{
                               <<"Content-Type">> => <<"application/json;charset=utf-8">>
                           }, Resp, Req1)
                   end,
            {ok, Req2, Opts}
    catch
        throw:Error ->
            %% Handlers may 'throw' a user-facing error term.
            ErrResp = iot_util:json_error(-1, Error),
            Req2 = cowboy_req:reply(404, #{
                <<"Content-Type">> => <<"application/json;charset=utf-8">>
            }, ErrResp, Req1),
            {ok, Req2, Opts};
        _:Error:Stack ->
            lager:warning("[http_handler] get error: ~p, stack: ~p", [Error, Stack]),
            Req2 = cowboy_req:reply(500, #{
                <<"Content-Type">> => <<"text/html;charset=utf-8">>
            }, <<"Internal Server Error">>, Req1),
            {ok, Req2, Opts}
    end.
|
||||
|
||||
%% True when the Accept-Encoding header value mentions gzip anywhere.
supported_gzip(AcceptEncoding) when is_binary(AcceptEncoding) ->
    nomatch =/= binary:match(AcceptEncoding, <<"gzip">>).
|
||||
|
||||
%% Decode the request body according to Content-Type.
%% FIX: both content types are now prefix-matched so a ";charset=..."
%% suffix is honored — the original matched the urlencoded type exactly
%% and silently dropped such bodies (JSON was already prefix-matched).
%% NOTE: a jiffy decode failure is deliberately kept as the caught
%% exception term, so handlers pattern-matching on a map fall through to
%% their catch-all clause instead of crashing here.
parse_body(Req0) ->
    case cowboy_req:header(<<"content-type">>, Req0) of
        <<"application/json", _/binary>> ->
            {ok, Body, Req1} = read_body(Req0),
            {ok, catch jiffy:decode(Body, [return_maps]), Req1};
        <<"application/x-www-form-urlencoded", _/binary>> ->
            {ok, PostParams0, Req1} = cowboy_req:read_urlencoded_body(Req0),
            {ok, maps:from_list(PostParams0), Req1};
        _ ->
            %% Unknown or missing content type: no post params.
            {ok, #{}, Req0}
    end.
|
||||
|
||||
%% Read the full request body, accumulating across cowboy body chunks.
read_body(Req) ->
    read_body(Req, <<>>).
read_body(Req, Acc) ->
    case cowboy_req:read_body(Req) of
        {more, Chunk, Req1} ->
            read_body(Req1, <<Acc/binary, Chunk/binary>>);
        {ok, Chunk, Req1} ->
            {ok, <<Acc/binary, Chunk/binary>>, Req1}
    end.
|
||||
31
apps/iot/src/http_handler/test_handler.erl
Normal file
31
apps/iot/src/http_handler/test_handler.erl
Normal file
@ -0,0 +1,31 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2020, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 26. 4月 2020 3:36 下午
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(test_handler).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([handle_request/4]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Echo endpoint used for testing: log the posted params and reply
%% success. (The copied-over comment about reloading hosts was wrong.)
handle_request("POST", "/test/receiver", _, PostParams) ->
    lager:debug("[test_handler] get post params: ~p", [PostParams]),
    {ok, 200, iot_util:json_data(<<"success">>)};
%% Fallback: unknown method/path.
handle_request(_, Path, _, _) ->
    BinPath = list_to_binary(Path),
    {ok, 200, iot_util:json_error(-1, <<"url: ", BinPath/binary, " not found">>)}.
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
35
apps/iot/src/http_handler/totalizator_handler.erl
Normal file
35
apps/iot/src/http_handler/totalizator_handler.erl
Normal file
@ -0,0 +1,35 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2020, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 26. 4月 2020 3:36 下午
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(totalizator_handler).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([handle_request/4]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% helper methods
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Query totalizator aggregates for the given scenes on the given dates.
%% Dates arrive as <<"YYYY-MM-DD">> binaries and are converted to
%% {Year, Month, Day} tuples before the mnesia query.
handle_request("POST", "/totalizator/query", _, #{<<"scene_ids">> := SceneIds, <<"dates">> := Dates0}) when is_list(SceneIds), is_list(Dates0) ->
    ToDate = fun(Bin) ->
        [Y, M, D] = [binary_to_integer(Part) || Part <- binary:split(Bin, <<"-">>, [global])],
        {Y, M, D}
    end,

    List = mnesia_totalizator:query(SceneIds, [ToDate(Date) || Date <- Dates0]),
    {ok, 200, iot_util:json_data(List)};

%% Fallback: unknown method/path.
handle_request(_, Path, _, _) ->
    Path1 = list_to_binary(Path),
    {ok, 200, iot_util:json_error(-1, <<"url: ", Path1/binary, " not found??">>)}.
|
||||
199
apps/iot/src/influxdb/influx_client.erl
Normal file
199
apps/iot/src/influxdb/influx_client.erl
Normal file
@ -0,0 +1,199 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 30. 5月 2023 10:48
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(influx_client).
|
||||
-author("aresei").
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([start_link/1, write/4, write/5, write_data/4]).
|
||||
-export([get_precision/1]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
|
||||
|
||||
-define(SERVER, ?MODULE).
|
||||
|
||||
-define(INFLUX_POOl, influx_pool).
|
||||
|
||||
-define(DEFAULT_BUCKET, <<"metric">>).
|
||||
-define(DEFAULT_ORG, <<"nannong">>).
|
||||
|
||||
-record(state, {
|
||||
host,
|
||||
port,
|
||||
token :: binary()
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% Data filter: keep only field maps that carry a non-empty binary
%% <<"key">>; everything else (including non-maps) is dropped.
data_filter(Fields) ->
    case Fields of
        #{<<"key">> := Key} when is_binary(Key), Key /= <<>> -> true;
        _ -> false
    end.
|
||||
|
||||
%% Infer the InfluxDB precision unit from the timestamp's decimal width:
%% 10 digits => seconds, 13 => milliseconds, 16 => microseconds,
%% 19 => nanoseconds; any other width defaults to milliseconds.
-spec get_precision(Timestamp :: integer()) -> binary().
get_precision(Timestamp) when is_integer(Timestamp) ->
    case length(integer_to_list(Timestamp)) of
        10 -> <<"s">>;
        13 -> <<"ms">>;
        16 -> <<"u">>;
        19 -> <<"ns">>;
        _  -> <<"ms">>
    end.
|
||||
|
||||
%% Build one point per field map and write them through the worker pool.
%% Entries without a usable non-empty binary <<"key">> are dropped first;
%% when nothing survives the filter, no request is made at all.
%% Fixes: emptiness was tested with length/1 (O(n)) — replaced by a
%% pattern match on []; the pool is referenced via the already-declared
%% ?INFLUX_POOl macro instead of a duplicated literal atom.
-spec write_data(Measurement :: binary(), Tags :: map(), FieldsList :: list(), Timestamp :: integer()) -> no_return().
write_data(Measurement, Tags, FieldsList, Timestamp) when is_binary(Measurement), is_map(Tags), is_list(FieldsList), is_integer(Timestamp) ->
    %% Drop entries that have no usable <<"key">>.
    case lists:filter(fun data_filter/1, FieldsList) of
        [] ->
            ok;
        NFieldsList ->
            %% One point per entry; the entry's values are nested under
            %% its key with the key itself removed from the value map.
            Points = lists:map(fun(Fields = #{<<"key">> := Key}) ->
                Values = maps:remove(<<"key">>, Fields),
                influx_point:new(Measurement, Tags, #{Key => Values}, Timestamp)
            end, NFieldsList),
            Precision = influx_client:get_precision(Timestamp),

            poolboy:transaction(?INFLUX_POOl, fun(Pid) -> influx_client:write(Pid, ?DEFAULT_BUCKET, ?DEFAULT_ORG, Precision, Points) end)
    end.
|
||||
|
||||
%% Fire-and-forget write using the default millisecond precision.
-spec write(Pid :: pid(), Bucket :: binary(), Org :: binary(), Points :: list()) -> no_return().
write(Pid, Bucket, Org, Points) when is_pid(Pid), is_binary(Bucket), is_binary(Org), is_list(Points) ->
    write(Pid, Bucket, Org, <<"ms">>, Points).

%% Precision is one of <<"s">> | <<"ms">> | <<"u">> | <<"ns">>
%% (time precision of the point timestamps; defaults to milliseconds).
-spec write(Pid :: pid(), Bucket :: binary(), Org :: binary(), Precision :: binary(), Points :: list()) -> no_return().
write(Pid, Bucket, Org, Precision, Points) when is_pid(Pid), is_binary(Bucket), is_binary(Org), is_binary(Precision), is_list(Points) ->
    gen_server:cast(Pid, {write, Bucket, Org, Precision, Points}).
|
||||
|
||||
%% @doc Spawns one pool worker. NOTE(review): the old comment claimed a
%% local name was registered — gen_server:start_link/3 is used here, so
%% the process is unregistered (instances are addressed by pid via the
%% pool).
-spec(start_link(Opts :: list()) ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Opts) when is_list(Opts) ->
    gen_server:start_link(?MODULE, [Opts], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_server callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc Initializes one worker with the InfluxDB connection parameters
%% (host / port / auth token) taken from the supplied proplist.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([InfluxProps]) ->
    [Host, Port, Token] = [proplists:get_value(K, InfluxProps) || K <- [host, port, token]],
    {ok, #state{host = Host, port = Port, token = Token}}.
|
||||
|
||||
%% @private
%% @doc Handling call messages. No synchronous API is exposed; any call
%% is answered with a plain 'ok'.
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Request, _From, State = #state{}) ->
    {reply, ok, State}.
|
||||
|
||||
%% @private
%% @doc Handling cast messages. {write, ...} renders the points to the
%% Influx line protocol, gzips the payload and POSTs it to /api/v2/write.
%% Errors are only logged — writes are best-effort.
%% BUG FIX: the hackney:body error branch formatted two ~p placeholders
%% but passed only [Error], so lager raised badarg exactly when an error
%% should have been logged; both arguments are now supplied.
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_cast({write, Bucket, Org, Precision, Points}, State = #state{host = Host, port = Port, token = Token}) ->
    %% Request headers: gzip-encoded line-protocol body, token auth.
    Headers = [
        {<<"Accept">>, <<"application/json">>},
        {<<"Accept-Encoding">>, <<"identity">>},
        {<<"Content-Type">>, <<"text/plain; charset=utf-8">>},
        {<<"Content-Encoding">>, <<"gzip">>},
        {<<"Authorization">>, <<"Token ", Token/binary>>}
    ],

    %% Render the points, one line-protocol entry per line.
    PointLines = lists:map(fun influx_point:normalized/1, Points),
    Body = iolist_to_binary(lists:join(<<"\n">>, PointLines)),
    %% gzip the payload to match Content-Encoding above.
    GZipBody = zlib:gzip(Body),
    Query = uri_string:compose_query([{<<"bucket">>, Bucket}, {<<"org">>, Org}, {<<"precision">>, Precision}]),

    Url = uri_string:normalize(#{
        scheme => "http",
        host => Host,
        port => Port,
        path => "/api/v2/write",
        query => Query
    }),

    lager:debug("[influx_client] url is: ~p, headers: ~p, body: ~ts", [Url, Headers, Body]),
    case hackney:request(post, Url, Headers, GZipBody, [{pool, false}]) of
        {ok, StatusCode, _RespHeaders, ClientRef} ->
            case hackney:body(ClientRef) of
                {ok, RespBody} ->
                    lager:debug("[influx_client] status_code: ~p, response body is: ~p", [StatusCode, RespBody]);
                {error, Error} ->
                    %% Was [Error] only — see BUG FIX note in the doc above.
                    lager:warning("[influx_client] status_code: ~p, error is: ~p", [StatusCode, Error])
            end,
            hackney:close(ClientRef);
        {error, Reason} ->
            lager:warning("[influx_client] request result is: ~p", [Reason])
    end,

    {noreply, State}.
|
||||
|
||||
%% @private
%% @doc Handling all non call/cast messages; everything is ignored.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_info(_Info, State = #state{}) ->
    {noreply, State}.
|
||||
|
||||
%% @private
%% @doc Called by gen_server when the worker is about to terminate; there
%% is no state to clean up (the worker holds no sockets between writes).
%% The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(_Reason, _State = #state{}) ->
    ok.
|
||||
|
||||
%% @private
%% @doc Convert process state when code is changed; the state record is
%% carried over unchanged.
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
59
apps/iot/src/influxdb/influx_point.erl
Normal file
59
apps/iot/src/influxdb/influx_point.erl
Normal file
@ -0,0 +1,59 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% In-memory representation of one InfluxDB data point and its
%%% rendering into the InfluxDB line protocol (see normalized/1).
%%% @end
%%% Created : 30. May 2023 11:28
%%%-------------------------------------------------------------------
-module(influx_point).
-author("aresei").

%% One InfluxDB point.
-record(point, {
  %% Measurement name (binary).
  measurement,
  %% Tag set: proplist of {TagName :: binary(), TagValue :: binary()}.
  tags = [],
  %% Field set: proplist of {FieldKey :: binary(), FieldValue}.
  fields = [],
  %% Timestamp; 0 means "fill in the current time when normalizing".
  time = 0 :: integer()
}).

%% API
-export([new/4, normalized/1]).
|
||||
|
||||
%% @doc Build a #point{} record. Tags and Fields may be given either as
%% proplists or as maps; maps are converted to proplists via as_list/1.
-spec new(Measurement :: binary(), Tags :: list() | map(), Fields :: list() | map(), Timestamp :: integer()) -> #point{}.
new(Measurement, Tags, Fields, Timestamp)
    when is_binary(Measurement),
         (is_list(Tags) orelse is_map(Tags)),
         (is_list(Fields) orelse is_map(Fields)),
         is_integer(Timestamp) ->
    %% Guard fixed: the original `is_binary(M), is_list(T); is_map(T),
    %% is_list(F); is_map(F), is_integer(Ts)` parsed (by Erlang guard
    %% precedence, `,` binds tighter than `;`) as three OR'ed pairs, so e.g.
    %% a non-binary Measurement was accepted whenever Tags was a map. The
    %% parenthesized `orelse` form checks every argument's type.
    #point{measurement = Measurement, tags = as_list(Tags), fields = as_list(Fields), time = Timestamp}.
|
||||
|
||||
%% @doc Render a point as an InfluxDB line-protocol binary:
%% `measurement,tag=v,... key=v,... <timestamp>`.
%% A zero `time` is replaced with the current iot_util:timestamp().
normalized(#point{measurement = Name, tags = Tags, fields = Fields, time = Time}) ->
    TagBins = [<<TagName/binary, $=, TagVal/binary>> || {TagName, TagVal} <- Tags],
    FieldBins = [<<Key/binary, $=, (field_val(Val))/binary>> || {Key, Val} <- Fields],

    Head = lists:join(<<",">>, [Name | TagBins]),
    FieldPart = lists:join(<<",">>, FieldBins),

    Stamp =
        if
            Time > 0 -> Time;
            true -> iot_util:timestamp()
        end,

    erlang:iolist_to_binary([Head, <<" ">>, FieldPart, <<" ">>, integer_to_binary(Stamp)]).
|
||||
|
||||
%% Encode a single field value for the InfluxDB line protocol.
field_val(Value) when is_integer(Value) ->
    integer_to_binary(Value);
field_val(Value) when is_float(Value) ->
    %% Floats are rendered with 6 decimal places by default.
    iot_util:float_to_binary(Value, 6);
field_val(Value) when is_binary(Value) ->
    %% NOTE(review): embedded double quotes in Value are not escaped —
    %% confirm inputs cannot contain `"`.
    <<$", Value/binary, $">>;
field_val(Value) when is_list(Value); is_map(Value) ->
    %% Structured values travel as base64-encoded JSON inside a quoted string.
    Encoded = base64:encode(iolist_to_binary(jiffy:encode(Value, [force_utf8]))),
    <<$", "base64:", Encoded/binary, $">>;
field_val(true) ->
    <<"true">>;
field_val(false) ->
    <<"false">>.
|
||||
|
||||
%% Coerce a proplist-or-map into a proplist.
as_list(Value) when is_list(Value) ->
    Value;
as_list(Value) when is_map(Value) ->
    maps:to_list(Value).
|
||||
|
||||
34
apps/iot/src/iot.app.src
Normal file
34
apps/iot/src/iot.app.src
Normal file
@ -0,0 +1,34 @@
|
||||
%% OTP application resource file for the `iot` application.
{application, iot,
  [{description, "An OTP application"},
   {vsn, "0.1.0"},
   %% No names registered up front; processes register themselves at runtime.
   {registered, []},
   %% Application callback module.
   {mod, {iot_app, []}},
   %% Runtime dependencies started before this application.
   %% NOTE(review): `observer` and `sync` are development tools — confirm
   %% they are intended in a production release.
   {applications,
    [
     sync,
     eredis,
     ranch,
     cowboy,
     lager,
     jiffy,
     parse_trans,
     hackney,
     poolboy,
     mysql,
     esockd,
     mnesia,
     crypto,
     public_key,
     ssl,
     erts,
     runtime_tools,
     observer,
     kernel,
     stdlib
    ]},
   {env,[]},
   %% Module list is filled in by the build tool.
   {modules, []},

   {licenses, ["Apache 2.0"]},
   {links, []}
  ]}.
|
||||
26
apps/iot/src/iot_ai_router.erl
Normal file
26
apps/iot/src/iot_ai_router.erl
Normal file
@ -0,0 +1,26 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 04. 7月 2023 11:30
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_ai_router).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([route_uuid/3]).
|
||||
|
||||
%% @doc Forward an AI event to the endpoint bound to the router's location
%% code. The terminal's location code is looked up in Redis; unknown routers
%% and Redis errors are logged and otherwise ignored.
%% Spec fixed: the function does return (the result of the taken branch), so
%% `no_return()` was an incorrect dialyzer contract.
-spec route_uuid(RouterUUID :: binary(), EventType :: integer(), Params :: map()) -> any().
route_uuid(RouterUUID, EventType, Params) when is_binary(RouterUUID), is_integer(EventType), is_map(Params) ->
    %% Look up the point (location) information bound to this terminal device.
    case redis_client:hget(RouterUUID, <<"location_code">>) of
        {ok, undefined} ->
            lager:debug("[iot_ai_router] the event_data hget location_code, uuid: ~p, not found", [RouterUUID]);
        {ok, LocationCode} when is_binary(LocationCode) ->
            iot_jinzhi_endpoint:forward(LocationCode, EventType, Params);
        {error, Reason} ->
            lager:debug("[iot_ai_router] the event_data hget location_code uuid: ~p, get error: ~p", [RouterUUID, Reason])
    end.
|
||||
138
apps/iot/src/iot_api.erl
Normal file
138
apps/iot/src/iot_api.erl
Normal file
@ -0,0 +1,138 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author anlicheng
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 24. 12月 2023 15:42
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_api).
|
||||
-author("anlicheng").
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([start_link/0]).
|
||||
-export([ai_event/1]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
|
||||
|
||||
-define(SERVER, ?MODULE).
%% Pre-shared secret used to sign outgoing event ids (see handle_cast).
%% NOTE(review): hard-coded credential checked into source control —
%% consider moving it to application configuration.
-define(API_TOKEN, <<"wv6fGyBhl*7@AsD9">>).

%% Server state (currently carries nothing).
-record(state, {

}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc Fire-and-forget notification that AI event `Id` should be pushed upstream.
ai_event(Id) when is_integer(Id) ->
    gen_server:cast(?SERVER, {ai_event, Id}).
|
||||
|
||||
%% @doc Spawns the server and registers it under the module name (unique).
-spec(start_link() ->
  {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_server callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc Initializes the server with an empty state; no resources are acquired.
-spec(init(Args :: term()) ->
  {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
  {stop, Reason :: term()} | ignore).
init([]) ->
    {ok, #state{}}.
|
||||
|
||||
%% @private
%% @doc Handling call messages. No synchronous API is exposed, so every
%% call is simply answered with `ok`.
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Req, _Caller, CurrentState = #state{}) ->
    {reply, ok, CurrentState}.
|
||||
|
||||
%% @private
%% @doc Handling cast messages.
%% For `{ai_event, Id}` a monitored worker process is spawned that signs the
%% event id with the shared API token and POSTs it as JSON to the configured
%% `api_url`. Failures are logged; the caller is never blocked. Worker exits
%% surface as 'DOWN' messages in handle_info/2.
-spec(handle_cast(Request :: term(), State :: #state{}) ->
  {noreply, NewState :: #state{}} |
  {noreply, NewState :: #state{}, timeout() | hibernate} |
  {stop, Reason :: term(), NewState :: #state{}}).
handle_cast({ai_event, Id}, State = #state{}) ->
  spawn_monitor(fun() ->
    %% Token = md5(API_TOKEN ++ Id ++ API_TOKEN); the peer verifies the same digest.
    Token = iot_util:md5(<<?API_TOKEN/binary, (integer_to_binary(Id))/binary, ?API_TOKEN/binary>>),
    {ok, Url} = application:get_env(iot, api_url),

    Headers = [
      {<<"content-type">>, <<"application/json">>}
    ],
    ReqData = #{
      <<"token">> => Token,
      <<"id">> => Id
    },
    Body = iolist_to_binary(jiffy:encode(ReqData, [force_utf8])),
    case hackney:request(post, Url, Headers, Body, [{pool, false}]) of
      {ok, 200, _, ClientRef} ->
        {ok, RespBody} = hackney:body(ClientRef),
        %% Log message fixed: this is the success path, not an error.
        lager:debug("[iot_api] send body: ~p, response is: ~p", [Body, RespBody]),
        hackney:close(ClientRef);
      {ok, HttpCode, _, ClientRef} ->
        {ok, RespBody} = hackney:body(ClientRef),
        hackney:close(ClientRef),
        lager:warning("[iot_api] send body: ~p, get error is: ~p", [Body, {HttpCode, RespBody}]);
      {error, Reason} ->
        lager:warning("[iot_api] send body: ~p, get error is: ~p", [Body, Reason])
    end
  end),

  {noreply, State}.
|
||||
|
||||
%% @private
%% @doc Handling all non call/cast messages — here only the 'DOWN'
%% notifications of the monitored request tasks spawned in handle_cast/2.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
  {noreply, NewState :: #state{}} |
  {noreply, NewState :: #state{}, timeout() | hibernate} |
  {stop, Reason :: term(), NewState :: #state{}}).
%% A request task finished normally: nothing to do.
handle_info({'DOWN', _Ref, process, _Worker, normal}, State) ->
    {noreply, State};
%% A request task crashed: record it and keep serving.
handle_info({'DOWN', _Ref, process, _Worker, Reason}, State) ->
    lager:notice("[iot_api] task process down with reason: ~p", [Reason]),
    {noreply, State};
%% Any other stray message is ignored.
handle_info(_Other, State = #state{}) ->
    {noreply, State}.
|
||||
|
||||
%% @private
%% @doc Called on shutdown; mirror of init/1. Nothing to release, and the
%% return value is ignored by gen_server.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(_Why, _Current = #state{}) ->
    ok.
|
||||
|
||||
%% @private
%% @doc Hot code upgrade hook; the state passes through unchanged.
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_FromVsn, CurrentState = #state{}, _Extra) ->
    {ok, CurrentState}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
23
apps/iot/src/iot_app.erl
Normal file
23
apps/iot/src/iot_app.erl
Normal file
@ -0,0 +1,23 @@
|
||||
%%%-------------------------------------------------------------------
%% @doc iot public API
%% @end
%%%-------------------------------------------------------------------
-module(iot_app).

-behaviour(application).

-include("iot.hrl").

-export([start/2, stop/1]).

%% @doc Application entry point: tune the VM, then boot the root supervisor.
start(_StartType, _StartArgs) ->
    io:setopts([{encoding, unicode}]),
    %% Reclaim process heaps more aggressively (full GC after 16 minor GCs).
    erlang:system_flag(fullsweep_after, 16),

    iot_sup:start_link().

%% @doc Application shutdown callback; nothing to clean up.
stop(_State) ->
    ok.

%% internal functions
||||
31
apps/iot/src/iot_auth.erl
Normal file
31
apps/iot/src/iot_auth.erl
Normal file
@ -0,0 +1,31 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%% 授权校验模块
|
||||
%%% @end
|
||||
%%% Created : 27. 6月 2023 09:48
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_auth).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([check/5]).
|
||||
|
||||
%% Validate a client token.
%% The token is md5(Salt ! PreToken ! UUID ! Timestamp) where PreToken is the
%% pre-shared secret configured per username in the `pre_tokens` app env.
-spec check(Username :: binary(), Token :: binary(), UUID :: binary(), Salt :: binary(), Timestamp :: integer()) -> boolean().
check(Username, Token, UUID, Salt, Timestamp) when is_binary(Username), is_binary(Token), is_binary(UUID), is_binary(Salt), is_integer(Timestamp) ->
    BinTimestamp = integer_to_binary(Timestamp),
    %% Valid within a 1-minute window. Using abs/1 also rejects timestamps
    %% dated more than 60s in the future; the old `Now - Ts =< 60` check
    %% accepted any future timestamp, so a future-dated token never expired.
    case abs(iot_util:current_time() - Timestamp) =< 60 of
        true ->
            {ok, PreTokens} = application:get_env(iot, pre_tokens),
            case proplists:get_value(Username, PreTokens) of
                undefined ->
                    %% Unknown username: no pre-shared token configured.
                    false;
                PreToken when is_binary(PreToken) ->
                    iot_util:md5(<<Salt/binary, "!", PreToken/binary, "!", UUID/binary, "!", BinTimestamp/binary>>) =:= Token
            end;
        false ->
            false
    end.
|
||||
33
apps/iot/src/iot_cipher_aes.erl
Normal file
33
apps/iot/src/iot_cipher_aes.erl
Normal file
@ -0,0 +1,33 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2018, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 29. 六月 2018 09:30
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_cipher_aes).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([encrypt/2, decrypt/2]).
|
||||
-export([test/0]).
|
||||
|
||||
%% Smoke test: round-trip a sample payload through encrypt/decrypt and log it.
test() ->
    Key = list_to_binary(iot_util:rand_bytes(32)),
    CipherText = encrypt(Key, <<"sdfsff hesdfs sfsdfsdffffffffffxyz yes call me">>),
    PlainText = decrypt(Key, CipherText),

    lager:debug("enc: ~p, size: ~p, data len is: ~p, data: ~p", [CipherText, byte_size(CipherText), byte_size(PlainText), PlainText]).
|
||||
|
||||
%% AES-based encryption: aes_256_cbc with PKCS#7 padding.
%% NOTE(review): the IV is the first 16 bytes of the key, so ciphertexts are
%% deterministic for a given key/plaintext pair — confirm this is acceptable;
%% decrypt/2 relies on the same derivation, so it cannot change unilaterally.
-spec encrypt(binary(), binary()) -> binary().
encrypt(Key, PlainText) when is_binary(Key), is_binary(PlainText) ->
    <<IV:16/binary, _/binary>> = Key,
    crypto:crypto_one_time(aes_256_cbc, Key, IV, PlainText, [{encrypt, true}, {padding, pkcs_padding}]).
|
||||
|
||||
%% AES-based decryption; inverse of encrypt/2 (IV = first 16 bytes of the key).
-spec decrypt(binary(), binary()) -> binary().
decrypt(Key, CipherText) when is_binary(Key), is_binary(CipherText) ->
    <<IV:16/binary, _/binary>> = Key,
    crypto:crypto_one_time(aes_256_cbc, Key, IV, CipherText, [{encrypt, false}, {padding, pkcs_padding}]).
|
||||
34
apps/iot/src/iot_cipher_rsa.erl
Normal file
34
apps/iot/src/iot_cipher_rsa.erl
Normal file
@ -0,0 +1,34 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2018, <COMPANY>
|
||||
%%% @doc
|
||||
%%% 采用的RSA 2048 PKCS1
|
||||
%%% @end
|
||||
%%% Created : 21. 六月 2018 09:51
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_cipher_rsa).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([encode/2, decode/2, private_encode/2]).
|
||||
|
||||
%% Decrypt data with an RSA private key supplied as a PEM binary.
decode(Data, PrivateKey) when is_binary(Data), is_binary(PrivateKey) ->
    [Entry] = public_key:pem_decode(PrivateKey),
    public_key:decrypt_private(Data, public_key:pem_entry_decode(Entry)).
|
||||
|
||||
%% Encrypt data with an RSA public key supplied as a PEM binary.
%% (The original comment said "decrypt"; this function encrypts.)
%% A map payload is serialized to JSON first, then encrypted.
encode(Data, PublicKey) when is_map(Data), is_binary(PublicKey) ->
    encode(jiffy:encode(Data, [force_utf8]), PublicKey);

encode(Data, PublicKey) when is_binary(Data), is_binary(PublicKey) ->
    [Entry] = public_key:pem_decode(PublicKey),
    public_key:encrypt_public(Data, public_key:pem_entry_decode(Entry)).
|
||||
|
||||
%% Encrypt data with the RSA private key (PEM binary), so holders of the
%% public key can recover it.
private_encode(Data, PrivateKey) when is_binary(Data), is_binary(PrivateKey) ->
    [Entry] = public_key:pem_decode(PrivateKey),
    public_key:encrypt_private(Data, public_key:pem_entry_decode(Entry)).
|
||||
39
apps/iot/src/iot_config.erl
Normal file
39
apps/iot/src/iot_config.erl
Normal file
@ -0,0 +1,39 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 17. 4月 2023 16:41
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_config).
|
||||
-author("licheng5").
|
||||
|
||||
%% API
|
||||
-export([emqt_opts/1]).
|
||||
|
||||
%% @doc Assemble the emqtt client option list for connecting to the EMQX
%% broker. Connection parameters come from the `emqx_server` application env;
%% the client id is derived from the node name plus the caller's suffix.
emqt_opts(ClientSuffix) when is_binary(ClientSuffix) ->
    {ok, Conf} = application:get_env(iot, emqx_server),
    NodeBin = atom_to_binary(node()),
    [
        {clientid, <<"mqtt-client-", NodeBin/binary, "-", ClientSuffix/binary>>},
        {host, proplists:get_value(host, Conf)},
        {port, proplists:get_value(port, Conf, 18080)},
        {owner, self()},
        {tcp_opts, []},
        {username, proplists:get_value(username, Conf)},
        {password, proplists:get_value(password, Conf)},
        {keepalive, proplists:get_value(keepalive, Conf, 86400)},
        {auto_ack, true},
        {proto_ver, v5},
        {retry_interval, proplists:get_value(retry_interval, Conf, 5)}
    ].
|
||||
220
apps/iot/src/iot_device.erl
Normal file
220
apps/iot/src/iot_device.erl
Normal file
@ -0,0 +1,220 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 14. 8月 2023 11:40
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_device).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_statem).
|
||||
|
||||
%% API
|
||||
-export([get_name/1, get_pid/1]).
|
||||
-export([start_link/2, is_activated/1, is_alive/1, change_status/2, reload/1, auth/2]).
|
||||
|
||||
%% gen_statem callbacks
|
||||
-export([init/1, handle_event/4, terminate/3, code_change/4, callback_mode/0]).
|
||||
|
||||
%% Device authorization flags (the `authorize_status` database column).
-define(DEVICE_AUTH_DENIED, 0).
-define(DEVICE_AUTH_AUTHED, 1).

%% gen_statem state names.
-define(STATE_DENIED, denied).
-define(STATE_ACTIVATED, activated).

%% Per-device process state.
-record(state, {
  %% UUID of the device this process represents.
  device_uuid :: binary(),
  %% Last known online/offline status (?DEVICE_* macros from iot.hrl).
  status = ?DEVICE_OFFLINE
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc A device counts as "alive" when its process exists AND is in the
%% activated state; anything else yields `error`.
-spec is_alive(DeviceUUID :: binary()) -> error | {ok, Pid :: pid()}.
is_alive(DeviceUUID) when is_binary(DeviceUUID) ->
    case iot_device:get_pid(DeviceUUID) of
        Pid when is_pid(Pid) ->
            case iot_device:is_activated(Pid) of
                true -> {ok, Pid};
                false -> error
            end;
        undefined ->
            error
    end.
|
||||
|
||||
%% @doc Pid of the registered device process, or undefined when not running.
-spec get_pid(DeviceUUID :: binary()) -> Pid :: pid() | undefined.
get_pid(UUID) when is_binary(UUID) ->
    whereis(get_name(UUID)).
|
||||
|
||||
%% @doc Registered process name for a device UUID.
%% NOTE(review): binary_to_atom/1 creates a fresh atom per UUID and atoms are
%% never reclaimed — confirm the device population is bounded.
-spec get_name(DeviceUUID :: binary()) -> atom().
get_name(UUID) when is_binary(UUID) ->
    binary_to_atom(<<"iot_device:", UUID/binary>>).
|
||||
|
||||
%% @doc Whether the device process is in the `activated` statem state.
%% `undefined` (no process) counts as not activated.
-spec is_activated(Pid :: pid() | undefined) -> boolean().
is_activated(undefined) ->
    false;
is_activated(DevicePid) when is_pid(DevicePid) ->
    gen_statem:call(DevicePid, is_activated).
|
||||
|
||||
%% @doc Asynchronously push a new online/offline status to the device
%% process; a missing process (`undefined`) is a no-op.
%% Spec fixed: both clauses return `ok` (gen_statem:cast/2 returns ok), so
%% `no_return()` was an incorrect dialyzer contract.
-spec change_status(Pid :: pid() | undefined, NewStatus :: integer()) -> ok.
change_status(undefined, _) ->
    ok;
change_status(Pid, NewStatus) when is_pid(Pid), is_integer(NewStatus) ->
    gen_statem:cast(Pid, {change_status, NewStatus}).
|
||||
|
||||
%% @doc Ask the device process to re-read its database row.
%% Spec fixed: gen_statem:cast/2 returns `ok`, so `no_return()` was wrong.
-spec reload(Pid :: pid()) -> ok.
reload(Pid) when is_pid(Pid) ->
    gen_statem:cast(Pid, reload).
|
||||
|
||||
%% @doc Push an authorization decision (granted/revoked) to the device
%% process. Spec fixed: gen_statem:cast/2 returns `ok`, so `no_return()`
%% was wrong.
-spec auth(Pid :: pid(), Auth :: boolean()) -> ok.
auth(Pid, Auth) when is_pid(Pid), is_boolean(Auth) ->
    gen_statem:cast(Pid, {auth, Auth}).
|
||||
|
||||
%% @doc Creates a gen_statem process which calls Module:init/1 to
%% initialize. To ensure a synchronized start-up procedure, this
%% function does not return until Module:init/1 has returned.
%% The second argument is either a device UUID or a pre-loaded device map;
%% init/1 dispatches on the type.
start_link(Name, UUIDOrInfo) when is_atom(Name), (is_binary(UUIDOrInfo) orelse is_map(UUIDOrInfo)) ->
    gen_statem:start_link({local, Name}, ?MODULE, [UUIDOrInfo], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_statem callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc gen_statem init callback. Accepts either a bare device UUID (the row
%% is then fetched from MySQL) or a pre-loaded device-info map. The initial
%% statem state is `activated` for an authorized device, otherwise `denied`;
%% a missing database row aborts the start with `ignore`.
init([DeviceUUID]) when is_binary(DeviceUUID) ->
    case device_bo:get_device_by_uuid(DeviceUUID) of
        {ok, DeviceInfo} ->
            init([DeviceInfo]);
        undefined ->
            lager:warning("[iot_device] device uuid: ~p, loaded from mysql failed", [DeviceUUID]),
            ignore
    end;
init([DeviceInfo = #{<<"device_uuid">> := DeviceUUID, <<"authorize_status">> := AuthorizeStatus, <<"status">> := Status}]) when is_map(DeviceInfo) ->
    StateName =
        case AuthorizeStatus of
            ?DEVICE_AUTH_AUTHED -> ?STATE_ACTIVATED;
            _ -> ?STATE_DENIED
        end,
    {ok, StateName, #state{device_uuid = DeviceUUID, status = Status}}.
|
||||
|
||||
%% @private
%% @doc Tells gen_statem which callback style this module uses: one generic
%% handle_event/4 rather than a function per state.
callback_mode() ->
    handle_event_function.
|
||||
|
||||
%% @private
%% @doc If callback_mode is handle_event_function, then whenever a
%% gen_statem receives an event from call/2, cast/2, or as a normal
%% process message, this function is called.

%% Query: is the device in the activated state?
handle_event({call, From}, is_activated, StateName, State = #state{}) ->
  {keep_state, State, [{reply, From, StateName =:= ?STATE_ACTIVATED}]};

%% Change to online, but the cached status is already online — ignore.
%% NOTE(review): this trusts the in-memory status; a database row changed
%% behind our back would be skipped — confirm that is intended.
handle_event(cast, {change_status, ?DEVICE_ONLINE}, _, State = #state{status = ?DEVICE_ONLINE}) ->
  {keep_state, State};
%% Persist the new online status to the database; this path runs rarely.
handle_event(cast, {change_status, ?DEVICE_ONLINE}, _, State = #state{device_uuid = DeviceUUID}) ->
  {ok, _} = device_bo:change_status(DeviceUUID, ?DEVICE_ONLINE),
  report_event(DeviceUUID, ?DEVICE_ONLINE),
  {keep_state, State#state{status = ?DEVICE_ONLINE}};

%% Offline transition: re-read the database row (the cached status may be
%% stale) and only persist + report when the device really was online.
handle_event(cast, {change_status, ?DEVICE_OFFLINE}, _, State = #state{device_uuid = DeviceUUID}) ->
  {ok, #{<<"status">> := Status}} = device_bo:get_device_by_uuid(DeviceUUID),
  case Status of
    ?DEVICE_NOT_JOINED ->
      %% A device that never joined cannot go offline; just sync the cache.
      lager:debug("[iot_device] device: ~p, device_maybe_offline, not joined, can not change to offline", [DeviceUUID]),
      {keep_state, State#state{status = ?DEVICE_NOT_JOINED}};
    ?DEVICE_OFFLINE ->
      %% Already offline in the database; nothing to persist or report.
      lager:debug("[iot_device] device: ~p, device_maybe_offline, is offline, do nothing", [DeviceUUID]),
      {keep_state, State#state{status = ?DEVICE_OFFLINE}};
    ?DEVICE_ONLINE ->
      {ok, _} = device_bo:change_status(DeviceUUID, ?DEVICE_OFFLINE),
      report_event(DeviceUUID, ?DEVICE_OFFLINE),
      {keep_state, State#state{status = ?DEVICE_OFFLINE}}
  end;

%% Reload the database row and re-derive both the statem state name and the
%% cached status; a missing row stops the process.
handle_event(cast, reload, _, State = #state{device_uuid = DeviceUUID}) ->
  lager:debug("[iot_device] will reload: ~p", [DeviceUUID]),
  case device_bo:get_device_by_uuid(DeviceUUID) of
    {ok, #{<<"authorize_status">> := AuthorizeStatus, <<"status">> := Status}} ->
      case AuthorizeStatus =:= ?DEVICE_AUTH_AUTHED of
        true ->
          {next_state, ?STATE_ACTIVATED, State#state{status = Status}};
        false ->
          {next_state, ?STATE_DENIED, State#state{status = Status}}
      end;
    undefined ->
      lager:warning("[iot_device] device uuid: ~p, loaded from mysql failed", [DeviceUUID]),
      {stop, normal, State}
  end;

%% Handle an authorization decision: move between denied/activated, keeping
%% the state when the decision matches the current state name.
handle_event(cast, {auth, Auth}, StateName, State = #state{device_uuid = DeviceUUID}) ->
  case {StateName, Auth} of
    {?STATE_DENIED, false} ->
      lager:debug("[iot_device] device_uuid: ~p, auth: false, will keep state_name: ~p", [DeviceUUID, ?STATE_DENIED]),
      {keep_state, State};
    {?STATE_DENIED, true} ->
      {next_state, ?STATE_ACTIVATED, State};

    {?STATE_ACTIVATED, false} ->
      lager:debug("[iot_device] device_uuid: ~p, auth: false, state_name from: ~p, to: ~p", [DeviceUUID, ?STATE_ACTIVATED, ?STATE_DENIED]),
      {next_state, ?STATE_DENIED, State};
    {?STATE_ACTIVATED, true} ->
      lager:debug("[iot_device] device_uuid: ~p, auth: true, will keep state_name: ~p", [DeviceUUID, ?STATE_ACTIVATED]),
      {keep_state, State}
  end.
|
||||
|
||||
%% @private
%% @doc Called when the statem is about to terminate; opposite of init/1.
%% Only logs the shutdown — there are no external resources to release.
%% The return value is ignored by gen_statem.
terminate(Why, StateName, #state{device_uuid = UUID}) ->
    lager:notice("[iot_device] device_uuid: ~p, state_name: ~p, terminate with reason: ~p", [UUID, StateName, Why]),
    ok.
|
||||
|
||||
%% @private
%% @doc Hot code upgrade hook; state name and data pass through unchanged.
code_change(_FromVsn, StateName, CurrentState = #state{}, _Extra) ->
    {ok, StateName, CurrentState}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
%% Report a device online/offline transition upstream as a one-element
%% field list routed through iot_router.
%% NOTE(review): the spec says no_return() but the function does return
%% (the result of the final lager:debug call); `ok` is probably intended.
%% NOTE(review): maps:get/2 crashes for any NewStatus other than 0 or 1.
-spec report_event(DeviceUUID :: binary(), NewStatus :: integer()) -> no_return().
report_event(DeviceUUID, NewStatus) when is_binary(DeviceUUID), is_integer(NewStatus) ->
  %% Human-readable status labels (0 = offline, 1 = online).
  TextMap = #{
    0 => <<"离线"/utf8>>,
    1 => <<"在线"/utf8>>
  },
  %% The device status change is reported to the upstream platform.
  Timestamp = iot_util:timestamp_of_seconds(),
  FieldsList = [#{
    <<"key">> => <<"device_status">>,
    <<"value">> => NewStatus,
    <<"value_text">> => maps:get(NewStatus, TextMap),
    <<"unit">> => 0,
    <<"type">> => <<"DI">>,
    <<"name">> => <<"设备状态"/utf8>>,
    <<"timestamp">> => Timestamp
  }],
  iot_router:route_uuid(DeviceUUID, FieldsList, Timestamp),
  lager:debug("[iot_device] device_uuid: ~p, route fields: ~p", [DeviceUUID, FieldsList]).
|
||||
60
apps/iot/src/iot_device_sup.erl
Normal file
60
apps/iot/src/iot_device_sup.erl
Normal file
@ -0,0 +1,60 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%% @end
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_device_sup).
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(supervisor).
|
||||
|
||||
-export([start_link/0, init/1, delete_device/1, ensured_device_started/1]).
|
||||
|
||||
%% @doc Start the device supervisor registered under the module name.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
%% Supervisor init: spawn one device process per row found in MySQL.
init([]) ->
    {ok, DeviceInfos} = device_bo:get_all_devices(),
    ChildSpecs = [child_spec(Info) || Info <- DeviceInfos],

    {ok, {#{strategy => one_for_one, intensity => 1000, period => 3600}, ChildSpecs}}.
|
||||
|
||||
%% @doc Return the pid of the device process for `DeviceUUID`, starting it
%% under this supervisor when it is not running yet. A start that races with
%% another starter ({'already_started', Pid}) is treated as success.
-spec ensured_device_started(UUID :: binary()) -> {ok, Pid :: pid()} | {error, Reason :: any()}.
ensured_device_started(DeviceUUID) when is_binary(DeviceUUID) ->
    case iot_device:get_pid(DeviceUUID) of
        RunningPid when is_pid(RunningPid) ->
            {ok, RunningPid};
        undefined ->
            case supervisor:start_child(?MODULE, child_spec(DeviceUUID)) of
                {ok, NewPid} when is_pid(NewPid) ->
                    {ok, NewPid};
                {error, {'already_started', RacedPid}} when is_pid(RacedPid) ->
                    {ok, RacedPid};
                {error, Error} ->
                    {error, Error}
            end
    end.
|
||||
|
||||
%% @doc Stop and remove the supervised child belonging to `UUID`.
delete_device(UUID) when is_binary(UUID) ->
    ChildId = iot_device:get_name(UUID),
    ok = supervisor:terminate_child(?MODULE, ChildId),
    supervisor:delete_child(?MODULE, ChildId).
|
||||
|
||||
%% Build the supervisor child spec for a device; the start argument is
%% either the bare UUID or the pre-loaded device-info map.
child_spec(DeviceUUID) when is_binary(DeviceUUID) ->
    device_spec(iot_device:get_name(DeviceUUID), DeviceUUID);
child_spec(DeviceInfo = #{<<"device_uuid">> := DeviceUUID}) when is_binary(DeviceUUID) ->
    device_spec(iot_device:get_name(DeviceUUID), DeviceInfo).

%% One permanent worker per device process.
device_spec(Name, StartArg) ->
    #{id => Name,
      start => {iot_device, start_link, [Name, StartArg]},
      restart => permanent,
      shutdown => 2000,
      type => worker,
      modules => ['iot_device']}.
|
||||
641
apps/iot/src/iot_host.erl
Normal file
641
apps/iot/src/iot_host.erl
Normal file
@ -0,0 +1,641 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 22. 9月 2023 16:38
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_host).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_statem).
|
||||
|
||||
%% Heartbeat check interval: one check every 15 minutes (milliseconds).
-define(HEARTBEAT_INTERVAL, 900 * 1000).

%% gen_statem state names: denied until the host is authorized, then activated.
-define(STATE_DENIED, denied).
-define(STATE_ACTIVATED, activated).
|
||||
|
||||
%% API
|
||||
-export([start_link/2, get_name/1, get_alias_name/1, get_pid/1, handle/2, activate/2]).
|
||||
-export([get_metric/1, publish_message/4, get_aes/1, get_status/1]).
|
||||
-export([publish_directive/6, send_directive/5]).
|
||||
-export([create_session/2, attach_channel/2]).
|
||||
-export([reload_device/2, delete_device/2, activate_device/3]).
|
||||
-export([heartbeat/1]).
|
||||
|
||||
%% gen_statem callbacks
|
||||
-export([init/1, handle_event/4, terminate/3, code_change/4, callback_mode/0]).
|
||||
|
||||
%% Per-host process state.
-record(state, {
    %% database id of the host row
    host_id :: integer(),
    %% host UUID as loaded from the database
    uuid :: binary(),
    %% AES key; subsequent traffic with the host is encrypted with it
    aes = <<>> :: binary(),
    %% true once create_session has succeeded over the current channel
    has_session = false :: boolean(),
    %% heartbeat counter; incremented per heartbeat, reset each ticker tick
    heartbeat_counter = 0 :: integer(),
    %% websocket channel process, undefined when no channel is attached
    channel_pid :: undefined | pid(),
    %% latest metrics map reported by the host via ping
    metrics = #{} :: map()
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
%% Look up the locally registered host process for UUID;
%% undefined when no such process is running on this node.
-spec get_pid(UUID :: binary()) -> undefined | pid().
get_pid(UUID) when is_binary(UUID) ->
    erlang:whereis(get_name(UUID)).
|
||||
|
||||
%% Registered-name atom for a host process: 'iot_host:<UUID>'.
-spec get_name(UUID :: binary()) -> atom().
get_name(UUID) when is_binary(UUID) ->
    binary_to_atom(iolist_to_binary([<<"iot_host:">>, UUID])).
|
||||
|
||||
%% Global alias atom for a host, keyed by its integer database id:
%% 'iot_host_id:<HostId>'. Lets callers find the process without a UUID lookup.
-spec get_alias_name(HostId :: integer()) -> atom().
get_alias_name(HostId) when is_integer(HostId) ->
    binary_to_atom(<<"iot_host_id:", (integer_to_binary(HostId))/binary>>).
|
||||
|
||||
%% Hand an inbound packet from the websocket channel to the host process.
%% Fire-and-forget: the packet is processed asynchronously in handle_event/4.
-spec handle(Pid :: pid(), Packet :: {atom(), binary()} | {atom(), {binary(), binary()}}) -> no_return().
handle(Pid, Packet) when is_pid(Pid) ->
    gen_statem:cast(Pid, {handle, Packet}).

%% Fetch this host's AES session key (used to encrypt subsequent traffic).
-spec get_aes(Pid :: pid()) -> {ok, Aes :: binary()}.
get_aes(Pid) when is_pid(Pid) ->
    gen_statem:call(Pid, get_aes).

%% Snapshot of the host's runtime status (channel/session flags, metrics,
%% per-device activation states).
-spec get_status(Pid :: pid()) -> {ok, Status :: map()}.
get_status(Pid) when is_pid(Pid) ->
    gen_statem:call(Pid, get_status).

%% Activate the host (true) or revoke its authorization (false).
-spec activate(Pid :: pid(), Auth :: boolean()) -> ok.
activate(Pid, Auth) when is_pid(Pid), is_boolean(Auth) ->
    gen_statem:call(Pid, {activate, Auth}).

%% Latest metrics map the host reported via its ping packets.
-spec get_metric(Pid :: pid()) -> {ok, MetricInfo :: map()}.
get_metric(Pid) when is_pid(Pid) ->
    gen_statem:call(Pid, get_metric).

%% Bind a websocket channel process to this host. Fails with an error
%% reply if a channel is already attached (see handle_event).
-spec attach_channel(pid(), pid()) -> ok | {error, Reason :: binary()}.
attach_channel(Pid, ChannelPid) when is_pid(Pid), is_pid(ChannelPid) ->
    gen_statem:call(Pid, {attach_channel, ChannelPid}).

%% Negotiate a session: the host's RSA public key is used to wrap the
%% reply (which carries the AES key when the host is activated).
-spec create_session(Pid :: pid(), PubKey :: binary()) -> {ok, Reply :: binary()}.
create_session(Pid, PubKey) when is_pid(Pid), is_binary(PubKey) ->
    gen_statem:call(Pid, {create_session, PubKey}).
|
||||
|
||||
%% Publish a raw command to the host and wait for the websocket response.
%% NOTE: the receive blocks the CALLING process for up to Timeout ms.
-spec publish_message(Pid :: pid(), CommandType :: integer(), Params :: binary() | {Encrypt :: atom(), Params :: binary()}, Timeout :: integer()) ->
    ok | {ok, Response :: binary()} | {error, Reason :: any()}.
publish_message(Pid, CommandType, Params, Timeout) when is_pid(Pid), is_integer(CommandType), is_integer(Timeout) ->
    case gen_statem:call(Pid, {publish_message, self(), CommandType, Params}) of
        {ok, Ref} ->
            wait_ws_response(Ref, Timeout);
        {error, Reason} ->
            {error, Reason}
    end.

%% Block until the websocket layer delivers the response tagged with Ref,
%% or give up after Timeout milliseconds.
wait_ws_response(Ref, Timeout) ->
    receive
        {ws_response, Ref} ->
            ok;
        {ws_response, Ref, Response} ->
            {ok, Response}
    after Timeout ->
        {error, timeout}
    end.
|
||||
|
||||
%% Publish a directive to a device behind this host and wait for the reply.
%% NOTE: the receive blocks the CALLING process for up to Timeout ms.
%%
%% Fix: the original guard read
%%   ..., is_binary(DirectiveParams); is_map(DirectiveParams), is_integer(Timeout)
%% In Erlang `;` separates whole guard sequences, so for binary params
%% Timeout was never checked, and for map params NONE of the pid/binary/
%% integer checks applied. The parenthesized `orelse` below enforces every
%% check for both parameter forms; all spec-conforming calls still match.
-spec publish_directive(Pid :: pid(), DeviceUUID :: binary(), DirectiveType :: integer(), Version :: binary(), DirectiveParams :: binary() | map(), Timeout :: integer()) ->
    ok | {ok, Response :: binary()} | {error, Reason :: any()}.
publish_directive(Pid, DeviceUUID, DirectiveType, Version, DirectiveParams, Timeout)
    when is_pid(Pid), is_binary(DeviceUUID), is_integer(DirectiveType), is_binary(Version),
         (is_binary(DirectiveParams) orelse is_map(DirectiveParams)), is_integer(Timeout) ->

    Directive = #{
        <<"device_uuid">> => DeviceUUID,
        <<"version">> => Version,
        <<"directive_type">> => DirectiveType,
        <<"directive">> => DirectiveParams
    },
    JsonDirective = iolist_to_binary(jiffy:encode(Directive, [force_utf8])),

    case gen_statem:call(Pid, {publish_directive, self(), JsonDirective}) of
        {ok, Ref} ->
            receive
                {ws_response, Ref} ->
                    ok;
                {ws_response, Ref, Response} ->
                    {ok, Response}
            after Timeout ->
                {error, timeout}
            end;
        {error, Reason} ->
            {error, Reason}
    end.
|
||||
|
||||
%% Send a directive to a device behind this host without waiting for a
%% websocket response (the gen_statem call returns as soon as the frame
%% is handed to the channel).
%%
%% Fix: same guard-precedence bug as publish_directive/6 — the original
%% `is_binary(DirectiveParams); is_map(DirectiveParams)` split the guard
%% into two alternatives, so map params skipped every pid/binary/integer
%% check. The parenthesized `orelse` keeps all checks for both forms.
-spec send_directive(Pid :: pid(), DeviceUUID :: binary(), DirectiveType :: integer(), Version :: binary(), DirectiveParams :: binary() | map()) ->
    ok | {error, Reason :: any()}.
send_directive(Pid, DeviceUUID, DirectiveType, Version, DirectiveParams)
    when is_pid(Pid), is_binary(DeviceUUID), is_integer(DirectiveType), is_binary(Version),
         (is_binary(DirectiveParams) orelse is_map(DirectiveParams)) ->

    Directive = #{
        <<"device_uuid">> => DeviceUUID,
        <<"version">> => Version,
        <<"directive_type">> => DirectiveType,
        <<"directive">> => DirectiveParams
    },
    JsonDirective = iolist_to_binary(jiffy:encode(Directive, [force_utf8])),

    gen_statem:call(Pid, {send_directive, JsonDirective}).
|
||||
|
||||
%% Device management helpers (all routed through the host process).

%% Ensure the device process exists, then ask it to reload its data.
-spec reload_device(Pid :: pid(), DeviceUUID :: binary()) -> ok | {error, Reason :: any()}.
reload_device(Pid, DeviceUUID) when is_pid(Pid), is_binary(DeviceUUID) ->
    gen_statem:call(Pid, {reload_device, DeviceUUID}).

%% Remove the device process (no-op if it is not running).
-spec delete_device(Pid :: pid(), DeviceUUID :: binary()) -> ok.
delete_device(Pid, DeviceUUID) when is_pid(Pid), is_binary(DeviceUUID) ->
    gen_statem:call(Pid, {delete_device, DeviceUUID}).

%% Grant (true) or revoke (false) a device's authorization.
-spec activate_device(Pid :: pid(), DeviceUUID :: binary(), Auth :: boolean()) -> ok | {error, Reason :: any()}.
activate_device(Pid, DeviceUUID, Auth) when is_pid(Pid), is_binary(DeviceUUID), is_boolean(Auth) ->
    gen_statem:call(Pid, {activate_device, DeviceUUID, Auth}).
|
||||
|
||||
%% Record one heartbeat from the host. Tolerates undefined (e.g. when the
%% caller looked the pid up and the process is gone) as a no-op.
%%
%% Fix: the original spec declared `heartbeat(Pid :: pid()) -> no_return()`,
%% but the function explicitly accepts undefined and both clauses return
%% ok (gen_statem:cast/2 returns ok). The spec now matches the code; the
%% implementation is unchanged.
-spec heartbeat(Pid :: undefined | pid()) -> ok.
heartbeat(undefined) ->
    ok;
heartbeat(Pid) when is_pid(Pid) ->
    gen_statem:cast(Pid, heartbeat).
|
||||
|
||||
%% @doc Creates a gen_statem process which calls Module:init/1 to
%% initialize. To ensure a synchronized start-up procedure, this
%% function does not return until Module:init/1 has returned.
%% Name is the locally registered name (see get_name/1); UUID identifies
%% the host row to load in init/1.
start_link(Name, UUID) when is_atom(Name), is_binary(UUID) ->
    gen_statem:start_link({local, Name}, ?MODULE, [UUID], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_statem callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
%% @doc gen_statem init: load the host row by UUID, register a global
%% alias keyed by the numeric host id (so callers can reach the process
%% without a database lookup), mint a fresh AES session key, and arm the
%% heartbeat ticker. Returns `ignore' when the host row cannot be loaded.
init([UUID]) ->
    case host_bo:get_host_by_uuid(UUID) of
        {ok, #{<<"id">> := HostId, <<"authorize_status">> := AuthorizeStatus}} ->
            %% Alias registration by host id avoids DB lookups for the pid.
            global:register_name(get_alias_name(HostId), self()),

            %% Fresh per-process AES key for subsequent encrypted traffic.
            Aes = list_to_binary(iot_util:rand_bytes(32)),

            %% Arm the heartbeat check timer.
            erlang:start_timer(?HEARTBEAT_INTERVAL, self(), heartbeat_ticker),

            %% authorize_status 1 means the host is already activated.
            StateName =
                case AuthorizeStatus of
                    1 -> ?STATE_ACTIVATED;
                    _ -> ?STATE_DENIED
                end,
            {ok, StateName, #state{host_id = HostId, uuid = UUID, aes = Aes, has_session = false}};
        undefined ->
            lager:warning("[iot_host] host uuid: ~p, loaded from mysql failed", [UUID]),
            ignore
    end.
|
||||
|
||||
%% @private
%% @doc This function is called by a gen_statem when it needs to find out
%% the callback mode of the callback module. All events are dispatched
%% through handle_event/4.
callback_mode() ->
    handle_event_function.
|
||||
|
||||
%% @private
%% @doc If callback_mode is handle_event_function, then whenever a
%% gen_statem receives an event from call/2, cast/2, or as a normal
%% process message, this function is called.
%%
%% Clause order matters: the ?STATE_ACTIVATED + has_session = true clauses
%% must precede their catch-all fallbacks, which reply with an offline error.

%% Return the last metrics map reported by the host.
handle_event({call, From}, get_metric, _, State = #state{metrics = Metrics}) ->
    {keep_state, State, [{reply, From, {ok, Metrics}}]};

%% Return the AES session key.
handle_event({call, From}, get_aes, _, State = #state{aes = Aes}) ->
    {keep_state, State, [{reply, From, {ok, Aes}}]};

%% Build a status snapshot: channel/session flags, heartbeat counter,
%% metrics, and each attached device's activation state.
handle_event({call, From}, get_status, _, State = #state{host_id = HostId, channel_pid = ChannelPid, heartbeat_counter = HeartbeatCounter, metrics = Metrics, has_session = HasSession}) ->
    %% Collect the activation status of every device attached to this host.
    {ok, Devices} = device_bo:get_host_devices(HostId),
    DeviceInfos = lists:map(fun(DeviceUUID) ->
        DevicePid = iot_device:get_pid(DeviceUUID),
        case iot_device:is_activated(DevicePid) of
            true -> {DeviceUUID, <<"activated">>};
            false -> {DeviceUUID, <<"denied">>}
        end
    end, Devices),

    HasChannel = (ChannelPid /= undefined),
    Reply = #{
        <<"has_channel">> => HasChannel,
        <<"has_session">> => HasSession,
        <<"heartbeat_counter">> => HeartbeatCounter,
        <<"metrics">> => Metrics,
        <<"device_infos">> => DeviceInfos
    },
    {keep_state, State, [{reply, From, {ok, Reply}}]};

%% Publish an AES-encrypted command. Requires an activated host with a
%% live session (the session is what established the shared AES key).
handle_event({call, From}, {publish_message, ReceiverPid, CommandType, {aes, Command0}}, ?STATE_ACTIVATED,
    State = #state{uuid = UUID, aes = AES, channel_pid = ChannelPid, has_session = true}) ->

    lager:debug("[iot_host] host: ~p, will publish aes message: ~p", [UUID, Command0]),
    Command = iot_cipher_aes:encrypt(AES, Command0),
    %% Push the frame over the websocket; the response is delivered to
    %% ReceiverPid tagged with Ref.
    Ref = ws_channel:publish(ChannelPid, ReceiverPid, <<CommandType:8, Command/binary>>),

    {keep_state, State, [{reply, From, {ok, Ref}}]};

%% Publish a plaintext command: allowed in any state as long as a channel
%% is attached (used e.g. during activation before a session exists).
handle_event({call, From}, {publish_message, ReceiverPid, CommandType, Command}, _,
    State = #state{uuid = UUID, channel_pid = ChannelPid}) when is_binary(Command), is_pid(ChannelPid) ->

    lager:debug("[iot_host] host: ~p, will publish message: ~p", [UUID, Command]),
    Ref = ws_channel:publish(ChannelPid, ReceiverPid, <<CommandType:8, Command/binary>>),

    {keep_state, State, [{reply, From, {ok, Ref}}]};

%% Fallback: no usable channel/session — the host is offline for commands.
handle_event({call, From}, {publish_message, _, _, _}, _, State = #state{uuid = UUID}) ->
    lager:debug("[iot_host] uuid: ~p, publish_message invalid state: ~p", [UUID, state_map(State)]),
    {keep_state, State, [{reply, From, {error, <<"主机离线,发送命令失败"/utf8>>}}]};

%% Publish a directive (frame type 16): always AES-encrypted, so it
%% requires an activated host with a live session.
handle_event({call, From}, {publish_directive, ReceiverPid, Directive0}, ?STATE_ACTIVATED,
    State = #state{uuid = UUID, aes = AES, channel_pid = ChannelPid, has_session = true}) ->

    lager:debug("[iot_host] host: ~p, will publish_directive: ~p", [UUID, Directive0]),
    Directive = iot_cipher_aes:encrypt(AES, Directive0),
    Ref = ws_channel:publish(ChannelPid, ReceiverPid, <<16:8, Directive/binary>>),

    {keep_state, State, [{reply, From, {ok, Ref}}]};

%% Fallback: directive publishing fails when offline.
handle_event({call, From}, {publish_directive, _, Directive}, _, State = #state{uuid = UUID}) ->
    lager:debug("[iot_host] uuid: ~p, publish_directive: ~p, invalid state: ~p", [UUID, Directive, state_map(State)]),
    {keep_state, State, [{reply, From, {error, <<"主机离线,发送指令失败"/utf8>>}}]};

%% Send a directive without expecting a response; same session requirement.
handle_event({call, From}, {send_directive, Directive0}, ?STATE_ACTIVATED,
    State = #state{uuid = UUID, aes = AES, channel_pid = ChannelPid, has_session = true}) ->

    lager:debug("[iot_host] host: ~p, will publish_directive: ~p", [UUID, Directive0]),
    Directive = iot_cipher_aes:encrypt(AES, Directive0),
    ws_channel:send(ChannelPid, <<16:8, Directive/binary>>),

    {keep_state, State, [{reply, From, ok}]};

%% Fallback: directive sending fails when offline.
handle_event({call, From}, {send_directive, Directive}, _, State = #state{uuid = UUID}) ->
    lager:debug("[iot_host] uuid: ~p, send_directive: ~p, invalid state: ~p", [UUID, Directive, state_map(State)]),
    {keep_state, State, [{reply, From, {error, <<"主机离线,发送指令失败"/utf8>>}}]};

%% Activate the host: notify it over frame type 8 (carrying the AES key)
%% when a channel is attached, then move to the activated state.
handle_event({call, From}, {activate, true}, _, State = #state{uuid = UUID, aes = Aes, channel_pid = ChannelPid}) when is_pid(ChannelPid) ->
    BinReply = jiffy:encode(#{<<"auth">> => true, <<"aes">> => Aes}, [force_utf8]),
    ws_channel:send(ChannelPid, <<8:8, BinReply/binary>>),
    lager:debug("[iot_host] uuid: ~p, activate: true, will send message: ~p", [UUID, BinReply]),
    {next_state, ?STATE_ACTIVATED, State, [{reply, From, ok}]};
%% Activation with no channel attached: just switch state.
handle_event({call, From}, {activate, true}, _, State = #state{uuid = UUID, channel_pid = undefined}) ->
    lager:debug("[iot_host] uuid: ~p, activate: true, no channel", [UUID]),
    {next_state, ?STATE_ACTIVATED, State, [{reply, From, ok}]};

%% Revoke authorization: notify the host, close the channel, drop the
%% session, and move to the denied state.
handle_event({call, From}, {activate, false}, _, State = #state{uuid = UUID, channel_pid = ChannelPid}) when is_pid(ChannelPid) ->
    BinReply = jiffy:encode(#{<<"auth">> => false}, [force_utf8]),
    ws_channel:send(ChannelPid, <<8:8, BinReply/binary>>),
    ws_channel:stop(ChannelPid, closed),
    lager:debug("[iot_host] uuid: ~p, activate: false, will send message: ~p", [UUID, BinReply]),
    {next_state, ?STATE_DENIED, State#state{channel_pid = undefined, has_session = false}, [{reply, From, ok}]};

%% Revocation with no channel attached: drop the session flag and deny.
handle_event({call, From}, {activate, false}, _, State = #state{uuid = UUID, channel_pid = undefined}) ->
    lager:debug("[iot_host] uuid: ~p, activate: false, no channel", [UUID]),
    {next_state, ?STATE_DENIED, State#state{has_session = false}, [{reply, From, ok}]};

%% Attach a websocket channel; monitored so a channel crash is observed
%% via the 'DOWN' clause below.
handle_event({call, From}, {attach_channel, ChannelPid}, _, State = #state{uuid = UUID, channel_pid = undefined}) ->
    lager:debug("[iot_host] attach_channel host_id uuid: ~p, channel: ~p", [UUID, ChannelPid]),
    erlang:monitor(process, ChannelPid),
    {keep_state, State#state{channel_pid = ChannelPid}, [{reply, From, ok}]};

%% Only one channel may be attached at a time.
handle_event({call, From}, {attach_channel, _}, _, State = #state{uuid = UUID, channel_pid = OldChannelPid}) ->
    lager:notice("[iot_host] attach_channel host_id uuid: ~p, old channel exists: ~p", [UUID, OldChannelPid]),
    {keep_state, State, [{reply, From, {error, <<"channel existed">>}}]};

%% Session creation for an activated host: RSA-wrap the AES key for the
%% caller, mark the host online in the database, and report the event.
handle_event({call, From}, {create_session, PubKey}, ?STATE_ACTIVATED, State = #state{uuid = UUID, aes = Aes}) ->
    Reply = #{<<"a">> => true, <<"aes">> => Aes},
    EncReply = iot_cipher_rsa:encode(Reply, PubKey),
    {ok, AffectedRow} = host_bo:change_status(UUID, ?HOST_ONLINE),
    report_event(UUID, ?HOST_ONLINE),
    lager:debug("[iot_host] host_id(session) uuid: ~p, create_session, will change status, affected_row: ~p", [UUID, AffectedRow]),
    {keep_state, State#state{has_session = true}, [{reply, From, {ok, <<10:8, EncReply/binary>>}}]};

%% Session creation while denied: refuse (a = false, empty AES) and do
%% not change the host status.
handle_event({call, From}, {create_session, PubKey}, ?STATE_DENIED, State = #state{uuid = UUID}) ->
    lager:debug("[iot_host] host_id(denied) uuid: ~p, create_session, will not change host status", [UUID]),
    Reply = #{<<"a">> => false, <<"aes">> => <<"">>},
    EncReply = iot_cipher_rsa:encode(Reply, PubKey),
    {keep_state, State#state{has_session = false}, [{reply, From, {ok, <<10:8, EncReply/binary>>}}]};

%% Reload a device: ensure its process is running, then ask it to reload.
handle_event({call, From}, {reload_device, DeviceUUID}, _, State) ->
    case iot_device_sup:ensured_device_started(DeviceUUID) of
        {ok, DevicePid} ->
            iot_device:reload(DevicePid),
            {keep_state, State, [{reply, From, ok}]};
        {error, Reason} ->
            {keep_state, State, [{reply, From, {error, Reason}}]}
    end;

%% Delete a device's process if it is running; always replies ok.
handle_event({call, From}, {delete_device, DeviceUUID}, _, State) ->
    case iot_device:get_pid(DeviceUUID) of
        undefined ->
            ok;
        DevicePid when is_pid(DevicePid) ->
            iot_device_sup:delete_device(DeviceUUID)
    end,
    {keep_state, State, [{reply, From, ok}]};

%% Activate/deny a device: ensure its process is running, then set auth.
handle_event({call, From}, {activate_device, DeviceUUID, Auth}, _, State) ->
    case iot_device_sup:ensured_device_started(DeviceUUID) of
        {ok, DevicePid} ->
            iot_device:auth(DevicePid, Auth),

            {keep_state, State, [{reply, From, ok}]};
        {error, Reason} ->
            {keep_state, State, [{reply, From, {error, Reason}}]}
    end;

%% Inbound data packet: AES-decrypt, JSON-decode, and dispatch to
%% handle_data/2. Invalid JSON is logged and dropped.
handle_event(cast, {handle, {data, Data}}, ?STATE_ACTIVATED, State = #state{aes = AES, has_session = true}) ->
    PlainData = iot_cipher_aes:decrypt(AES, Data),
    case catch jiffy:decode(PlainData, [return_maps]) of
        Info when is_map(Info) ->
            handle_data(Info, State);
        Other ->
            lager:notice("[iot_host] the data is invalid json: ~p", [Other])
    end,
    {keep_state, State};

%% Ping packet: AES-encrypted metrics snapshot, so a session is required.
%% On success the metrics in the state are replaced wholesale.
handle_event(cast, {handle, {ping, CipherMetric}}, ?STATE_ACTIVATED, State = #state{uuid = UUID, aes = AES, has_session = true}) ->
    MetricsInfo = iot_cipher_aes:decrypt(AES, CipherMetric),
    case catch jiffy:decode(MetricsInfo, [return_maps]) of
        Metrics when is_map(Metrics) ->
            lager:debug("[iot_host] host_id uuid: ~p, get ping: ~p", [UUID, Metrics]),
            {keep_state, State#state{metrics = Metrics}};
        Other ->
            lager:warning("[iot_host] host_id: ~p, ping is invalid json: ~p", [UUID, Other]),
            {keep_state, State}
    end;

%% Service inform packet: persist each service's status log and update
%% the corresponding micro-service status record.
handle_event(cast, {handle, {inform, Info0}}, ?STATE_ACTIVATED, State = #state{uuid = UUID, host_id = HostId, aes = AES, has_session = true}) ->
    Info = iot_cipher_aes:decrypt(AES, Info0),
    case catch jiffy:decode(Info, [return_maps]) of
        #{<<"at">> := At, <<"services">> := ServiceInforms} ->
            lager:debug("[iot_host] host: ~p, service infos is: ~p", [UUID, ServiceInforms]),
            lists:foreach(fun(#{<<"props">> := Props, <<"name">> := Name, <<"version">> := Version, <<"version_copy">> := VersionCopy, <<"status">> := Status}) ->
                %% props encodes "host_id:scene_id:micro_service_id"
                {SceneId, MicroId} = parse_props(Props),
                micro_inform_log:insert(#{
                    <<"host_id">> => HostId,
                    <<"scene_id">> => SceneId,
                    <<"service_name">> => Name,
                    <<"version">> => Version,
                    <<"version_copy">> => VersionCopy,
                    <<"status">> => Status,
                    <<"created_at">> => At
                }),
                micro_set_bo:change_status(HostId, SceneId, MicroId, Status)
            end, ServiceInforms);
        Error ->
            lager:warning("[iot_host] inform get error: ~p", [Error])
    end,
    {keep_state, State};

%% Scene-task step feedback: persist one progress row per packet.
handle_event(cast, {handle, {feedback_step, Info0}}, ?STATE_ACTIVATED, State = #state{aes = AES, has_session = true}) ->
    Info = iot_cipher_aes:decrypt(AES, Info0),
    case catch jiffy:decode(Info, [return_maps]) of
        Data = #{<<"task_id">> := TaskId, <<"code">> := Code} ->
            Result = scene_feedback_step:insert(#{
                <<"task_id">> => TaskId,
                <<"code">> => Code,
                <<"created_at">> => iot_util:current_time()
            }),
            lager:debug("[iot_host] feedback_step info: ~p, insert result: ~p", [Data, Result]);
        Other ->
            lager:warning("[iot_host] feedback_step error: ~p", [Other])
    end,
    {keep_state, State};

%% Scene-task final result: persist the outcome row.
handle_event(cast, {handle, {feedback_result, Info0}}, ?STATE_ACTIVATED, State = #state{aes = AES, has_session = true}) ->
    Info = iot_cipher_aes:decrypt(AES, Info0),
    case catch jiffy:decode(Info, [return_maps]) of
        #{<<"task_id">> := TaskId, <<"time">> := Time, <<"code">> := Code, <<"reason">> := Reason, <<"error">> := Error, <<"type">> := Type} ->
            scene_feedback:insert(#{
                <<"task_id">> => TaskId,
                <<"task_type">> => Type,
                <<"code">> => Code,
                <<"reason">> => Reason,
                <<"error">> => Error,
                <<"created_at">> => Time
            });
        Other ->
            lager:warning("[iot_host] feedback_result error: ~p", [Other])
    end,
    {keep_state, State};

%% Generic device event: only the device-status event type is handled;
%% other well-formed events are logged as unsupported.
handle_event(cast, {handle, {event, Event0}}, ?STATE_ACTIVATED, State = #state{uuid = UUID, aes = AES, has_session = true}) ->
    EventText = iot_cipher_aes:decrypt(AES, Event0),
    lager:debug("[iot_host] uuid: ~p, get event: ~p", [UUID, EventText]),
    case catch jiffy:decode(EventText, [return_maps]) of
        #{<<"event_type">> := ?EVENT_DEVICE, <<"params">> := #{<<"device_uuid">> := DeviceUUID, <<"status">> := Status}} ->
            DevicePid = iot_device:get_pid(DeviceUUID),
            iot_device:change_status(DevicePid, Status);
        Event when is_map(Event) ->
            lager:warning("[iot_host] host: ~p, event: ~p, not supported", [UUID, Event]);
        Other ->
            lager:warning("[iot_host] host: ~p, event error: ~p", [UUID, Other])
    end,
    {keep_state, State};

%% AI event: persist a log row, notify the external API, mark the device
%% online, and route the event params to the AI router.
handle_event(cast, {handle, {ai_event, Event0}}, ?STATE_ACTIVATED, State = #state{uuid = UUID, aes = AES, has_session = true}) ->
    EventText = iot_cipher_aes:decrypt(AES, Event0),
    lager:debug("[iot_host] uuid: ~p, get ai_event: ~p", [UUID, EventText]),
    case catch jiffy:decode(EventText, [return_maps]) of
        #{<<"event_type">> := EventType, <<"params">> := Params0 = #{<<"device_uuid">> := DeviceUUID, <<"props">> := Props}} ->
            case iot_device:is_alive(DeviceUUID) of
                error ->
                    lager:notice("[iot_host] uuid: ~p, device_uuid: ~p is not alive, get ai_event: ~p", [UUID, DeviceUUID, EventText]),
                    ok;
                {ok, DevicePid} ->
                    Params = maps:remove(<<"props">>, Params0),
                    %% props encodes "host_id:scene_id:micro_service_id"
                    {SceneId, MicroId} = parse_props(Props),

                    %% Persist to MySQL; only notify the API when the
                    %% insert produced a log id.
                    Message = iolist_to_binary(jiffy:encode(Params, [force_utf8])),
                    case ai_event_logs_bo:insert(UUID, DeviceUUID, SceneId, MicroId, EventType, Message) of
                        {ok, LogId} ->
                            iot_api:ai_event(LogId);
                        _ ->
                            ok
                    end,
                    iot_device:change_status(DevicePid, ?DEVICE_ONLINE),

                    iot_ai_router:route_uuid(DeviceUUID, EventType, Params)
            end;
        Event when is_map(Event) ->
            lager:warning("[iot_host] host: ~p, event: ~p, not supported", [UUID, Event]);
        Other ->
            lager:warning("[iot_host] host: ~p, event error: ~p", [UUID, Other])
    end,
    {keep_state, State};

%% Heartbeat received: bump the counter (inspected by the ticker below).
handle_event(cast, heartbeat, _, State = #state{heartbeat_counter = HeartbeatCounter}) ->
    {keep_state, State#state{heartbeat_counter = HeartbeatCounter + 1}};

%% Ticker fired with counter still 0: no heartbeat arrived this interval.
%% Mark the host offline (unless it never joined or is already offline),
%% close the channel so the host must reconnect, and re-arm the ticker.
%% Device states are left unchanged.
handle_event(info, {timeout, _, heartbeat_ticker}, _, State = #state{uuid = UUID, heartbeat_counter = 0, channel_pid = ChannelPid}) ->
    lager:warning("[iot_host] uuid: ~p, heartbeat lost, devices will unknown", [UUID]),
    {ok, #{<<"status">> := Status}} = host_bo:get_host_by_uuid(UUID),
    case Status of
        ?HOST_NOT_JOINED ->
            lager:debug("[iot_host] host: ~p, host_maybe_offline, host not joined, can not change to offline", [UUID]);
        ?HOST_OFFLINE ->
            lager:debug("[iot_host] host: ~p, host_maybe_offline, host now is offline, do nothing", [UUID]);
        ?HOST_ONLINE ->
            {ok, _} = host_bo:change_status(UUID, ?HOST_OFFLINE),
            report_event(UUID, ?HOST_OFFLINE)
    end,

    %% Closing the channel forces a reconnect, keeping state consistent.
    is_pid(ChannelPid) andalso ws_channel:stop(ChannelPid, closed),
    erlang:start_timer(?HEARTBEAT_INTERVAL, self(), heartbeat_ticker),

    {keep_state, State#state{channel_pid = undefined, has_session = false, heartbeat_counter = 0}};

%% Ticker fired with a non-zero counter: host is alive — reset the
%% counter and re-arm the ticker.
handle_event(info, {timeout, _, heartbeat_ticker}, _, State = #state{}) ->
    erlang:start_timer(?HEARTBEAT_INTERVAL, self(), heartbeat_ticker),
    {keep_state, State#state{heartbeat_counter = 0}};

%% The attached channel died: clear channel and session. The host's
%% database status is only changed by the heartbeat mechanism above.
handle_event(info, {'DOWN', _Ref, process, ChannelPid, Reason}, _, State = #state{uuid = UUID, channel_pid = ChannelPid, has_session = HasSession}) ->
    lager:warning("[iot_host] uuid: ~p, channel: ~p, down with reason: ~p, has_session: ~p, state: ~p", [UUID, ChannelPid, Reason, HasSession, State]),
    {keep_state, State#state{channel_pid = undefined, has_session = false}};

%% 'DOWN' from some other monitored process (e.g. a stale channel): ignore.
handle_event(info, {'DOWN', _Ref, process, Pid, Reason}, _, State = #state{uuid = UUID}) ->
    lager:debug("[iot_host] uuid: ~p, process_pid: ~p, down with reason: ~p, state: ~p", [UUID, Pid, Reason, State]),
    {keep_state, State};

%% Catch-all: log and ignore anything not matched above.
handle_event(Event, Info, StateName, State = #state{uuid = UUID}) ->
    lager:warning("[iot_host] host: ~p, event: ~p, unknown message: ~p, state_name: ~p, state: ~p", [UUID, Event, Info, StateName, state_map(State)]),
    {keep_state, State}.
|
||||
|
||||
%% @private
%% @doc This function is called by a gen_statem when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_statem terminates with
%% Reason. The return value is ignored. No cleanup is needed here beyond
%% logging; monitors and timers die with the process.
terminate(Reason, _StateName, _State = #state{uuid = UUID, has_session = HasSession}) ->
    lager:debug("[iot_host] host: ~p, terminate with reason: ~p, has_session: ~p", [UUID, Reason, HasSession]),
    ok.
|
||||
|
||||
%% @private
%% @doc Convert process state when code is changed. The state record is
%% carried over unchanged.
code_change(_OldVsn, StateName, State = #state{}, _Extra) ->
    {ok, StateName, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
|
||||
%% Dispatch a decoded data packet.
%% First clause: packet addressed to a specific device (non-empty
%% device_uuid) — route the fields, write them to influxdb tagged with
%% host/service/device, and mark the device online. Packets for unknown
%% or non-activated devices are logged and dropped.
handle_data(#{<<"device_uuid">> := DeviceUUID, <<"service_name">> := ServiceName, <<"at">> := Timestamp, <<"fields">> := FieldsList, <<"tags">> := Tags}, #state{uuid = UUID})
    when is_binary(DeviceUUID), DeviceUUID /= <<>> ->

    case iot_device:get_pid(DeviceUUID) of
        undefined ->
            lager:warning("[iot_host] host uuid: ~p, device uuid: ~p not found, fields: ~p, tags: ~p", [UUID, DeviceUUID, FieldsList, Tags]),
            ok;
        DevicePid when is_pid(DevicePid) ->
            case iot_device:is_activated(DevicePid) of
                true ->
                    %% Route the fields to whatever points subscribe to
                    %% this device.
                    iot_router:route_uuid(DeviceUUID, FieldsList, Timestamp),

                    %% Persist the measurement to influxdb.
                    NTags = Tags#{<<"uuid">> => UUID, <<"service_name">> => ServiceName, <<"device_uuid">> => DeviceUUID},
                    influx_client:write_data(DeviceUUID, NTags, FieldsList, Timestamp),

                    iot_device:change_status(DevicePid, ?DEVICE_ONLINE);
                false ->
                    lager:warning("[iot_host] host uuid: ~p, device_uuid: ~p not activated, fields: ~p, tags: ~p", [UUID, DeviceUUID, FieldsList, Tags])
            end
    end;

%% Second clause: packet with no device_uuid — the data belongs to the
%% host itself; route and persist keyed by the host UUID.
handle_data(#{<<"service_name">> := ServiceName, <<"at">> := Timestamp, <<"fields">> := FieldsList, <<"tags">> := Tags}, #state{uuid = UUID}) ->
    iot_router:route_uuid(UUID, FieldsList, Timestamp),

    NTags = Tags#{<<"uuid">> => UUID, <<"service_name">> => ServiceName},
    influx_client:write_data(UUID, NTags, FieldsList, Timestamp).
|
||||
|
||||
%% Report a host status transition by routing a synthetic "host_status"
%% DI field through the normal field router.
-spec report_event(UUID :: binary(), NewStatus :: integer()) -> no_return().
report_event(UUID, NewStatus) when is_binary(UUID), is_integer(NewStatus) ->
    %% Human-readable label for the numeric status.
    StatusText = maps:get(NewStatus, #{
        0 => <<"离线"/utf8>>,
        1 => <<"在线"/utf8>>
    }),

    Timestamp = iot_util:timestamp_of_seconds(),
    StatusField = #{
        <<"key">> => <<"host_status">>,
        <<"value">> => NewStatus,
        <<"value_text">> => StatusText,
        <<"unit">> => 0,
        <<"type">> => <<"DI">>,
        <<"name">> => <<"主机状态"/utf8>>,
        <<"timestamp">> => Timestamp
    },
    FieldsList = [StatusField],
    iot_router:route_uuid(UUID, FieldsList, Timestamp),
    lager:debug("[iot_host] host_uuid: ~p, route fields: ~p", [UUID, FieldsList]).
|
||||
|
||||
%% Render the #state{} record as a plain map (used for logging only).
state_map(State) ->
    #state{host_id = HostId,
           uuid = UUID,
           aes = Aes,
           has_session = HasSession,
           heartbeat_counter = Counter,
           channel_pid = ChannelPid,
           metrics = Metrics} = State,
    #{host_id => HostId,
      uuid => UUID,
      aes => Aes,
      has_session => HasSession,
      heartbeat_counter => Counter,
      channel_pid => ChannelPid,
      metrics => Metrics}.
|
||||
|
||||
%% Parse a props binary of the form "host_id:scene_id:micro_service_id"
%% into {SceneId, MicroId}; the leading host id is discarded.
%%
%% Fix: the spec claimed `Props :: undefined | binary()`, but no clause
%% accepts undefined (it would crash with function_clause). The spec now
%% matches the implementation; behavior is unchanged for all callers that
%% pass a binary.
-spec parse_props(Props :: binary()) -> {SceneId :: integer(), MicroId :: integer()}.
parse_props(Props) when is_binary(Props) ->
    [_HostId, SceneId0, MicroId0] = binary:split(Props, <<":">>, [global]),
    {binary_to_integer(SceneId0), binary_to_integer(MicroId0)}.
|
||||
50
apps/iot/src/iot_host_sup.erl
Normal file
50
apps/iot/src/iot_host_sup.erl
Normal file
@ -0,0 +1,50 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%% @end
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_host_sup).
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(supervisor).
|
||||
|
||||
-export([start_link/0, init/1, delete_host/1, ensured_host_started/1]).
|
||||
|
||||
%% Start this supervisor, locally registered under the module name.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
|
||||
|
||||
%% Supervisor callback: one permanent worker per host known to the
%% backend, restarted independently (one_for_one).
init([]) ->
    HostSpecs = [child_spec(Host) || Host <- host_bo:get_all_hosts()],
    {ok, {#{strategy => one_for_one, intensity => 1000, period => 3600}, HostSpecs}}.
|
||||
|
||||
%% Return the pid of the host process for UUID, starting it under this
%% supervisor if needed. A concurrent start is folded into success via
%% the already_started error.
-spec ensured_host_started(UUID :: binary()) -> {ok, Pid :: pid()} | {error, Reason :: any()}.
ensured_host_started(UUID) when is_binary(UUID) ->
    case iot_host:get_pid(UUID) of
        undefined ->
            case supervisor:start_child(?MODULE, child_spec(UUID)) of
                {ok, NewPid} when is_pid(NewPid) ->
                    {ok, NewPid};
                {error, {'already_started', RacedPid}} when is_pid(RacedPid) ->
                    %% Someone else won the start race; reuse their pid.
                    {ok, RacedPid};
                {error, Error} ->
                    {error, Error}
            end;
        RunningPid when is_pid(RunningPid) ->
            {ok, RunningPid}
    end.
|
||||
|
||||
%% Stop and remove the host child for UUID from this supervisor.
%% Fix: the original asserted `ok = supervisor:terminate_child/2`, which
%% crashes with a badmatch when the child does not exist and
%% terminate_child returns {error, not_found}. Deleting a host that is
%% not running should be a no-op, so not_found maps to ok. This also
%% matches the tolerant behavior of iot_device_sup:delete_device/1.
delete_host(UUID) when is_binary(UUID) ->
    Id = iot_host:get_name(UUID),
    case supervisor:terminate_child(?MODULE, Id) of
        ok ->
            supervisor:delete_child(?MODULE, Id);
        {error, not_found} ->
            %% Child already gone: nothing to terminate or delete.
            ok
    end.
|
||||
|
||||
%% Build the supervisor child spec for a host worker.
%% Fix (consistency): added the `is_binary(UUID)` guard so the clause
%% fails explicitly at the head for bad input, matching the sibling
%% iot_device_sup:child_spec/1 and the binary UUID that
%% iot_host:get_name/1 itself requires.
child_spec(UUID) when is_binary(UUID) ->
    Id = iot_host:get_name(UUID),
    #{id => Id,
      start => {iot_host, start_link, [Id, UUID]},
      restart => permanent,
      shutdown => 2000,
      type => worker,
      modules => ['iot_host']}.
|
||||
40
apps/iot/src/iot_http_client.erl
Normal file
40
apps/iot/src/iot_http_client.erl
Normal file
@ -0,0 +1,40 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author licheng5
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Thin HTTP POST helper built on hackney.
%%% @end
%%% Created : 03. 3月 2023 11:48
%%%-------------------------------------------------------------------
-module(iot_http_client).
-author("licheng5").

%% API
-export([post/2]).

%% @doc POST Body to Url. Returns ok on HTTP 200, {error, Reason} on
%% transport errors and {error, {http_status, Code}} on any other
%% status code. The response body is always drained with
%% hackney:body/1 so the underlying connection can be reused.
%%
%% Fix: the original reported non-200 responses as ok (and only at
%% debug level), making remote failures indistinguishable from
%% success; they now fall within the declared {error, Reason} result.
post(Url, Body) when is_list(Url), is_binary(Body) ->
    case hackney:request(post, Url, [], Body) of
        {ok, 200, _Headers, ClientRef} ->
            case hackney:body(ClientRef) of
                {ok, RespBody} ->
                    lager:debug("[iot_http_client] url: ~p, response is: ~p", [Url, RespBody]),
                    ok;
                {error, Reason} ->
                    lager:warning("[iot_http_client] url: ~p, get error: ~p", [Url, Reason]),
                    {error, Reason}
            end;

        {ok, HttpCode, _Headers, ClientRef} ->
            %% Non-200: still drain the body (frees the connection), but
            %% surface the failure to the caller.
            case hackney:body(ClientRef) of
                {ok, RespBody} ->
                    lager:warning("[iot_http_client] url: ~p, http_code: ~p, response is: ~p", [Url, HttpCode, RespBody]),
                    {error, {http_status, HttpCode}};
                {error, Reason} ->
                    lager:warning("[iot_http_client] url: ~p, http_code: ~p, get error: ~p", [Url, HttpCode, Reason]),
                    {error, Reason}
            end;

        {error, Reason} ->
            lager:warning("[iot_http_client] url: ~p, get error: ~p", [Url, Reason]),
            {error, Reason}
    end.
|
||||
157
apps/iot/src/iot_logger.erl
Normal file
157
apps/iot/src/iot_logger.erl
Normal file
@ -0,0 +1,157 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Per-file, date-rotated line logger. Each server instance owns one
%%% log file under <code root>/log/<FileName>.<YYYYMMDD> and rolls
%%% over to a fresh file when the local date changes.
%%% @end
%%% Created : 07. 9月 2023 17:07
%%%-------------------------------------------------------------------
-module(iot_logger).
-author("aresei").

-behaviour(gen_server).

%% API
-export([start_link/1, write/2]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

-define(SERVER, ?MODULE).

-record(state, {
    file_name :: string(),   % base log file name, without the date suffix
    date :: calendar:date(), % local date the currently open file belongs to
    file                     % io device of the currently open file
}).

%%%===================================================================
%%% API
%%%===================================================================

%% @doc Append Data as one timestamped line. Data is either a binary
%% or a list of binaries (joined with tabs). Asynchronous cast.
write(Pid, Data) when is_pid(Pid) ->
    gen_server:cast(Pid, {write, Data}).

%% @doc Spawns the server (not registered: one server per log file).
-spec(start_link(FileName :: string()) ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(FileName) when is_list(FileName) ->
    gen_server:start_link(?MODULE, [FileName], []).

%%%===================================================================
%%% gen_server callbacks
%%%===================================================================

%% @private
%% @doc Ensures the log directory exists, then opens today's file for
%% appending.
init([FileName]) ->
    ensure_dir(),
    FilePath = make_file(FileName),
    {ok, File} = file:open(FilePath, [append, binary]),
    {ok, #state{file = File, file_name = FileName, date = get_date()}}.

%% @private No synchronous API; replies ok to any call.
handle_call(_Request, _From, State = #state{}) ->
    {reply, ok, State}.

%% @private
%% @doc Writes one line; when the local date changed since the file
%% was opened, closes it and reopens today's file first.
handle_cast({write, Data}, State = #state{file = OldFile, file_name = FileName, date = Date}) ->
    Line = <<(time_prefix())/binary, " ", (format(Data))/binary, $\n>>,
    case maybe_new_file(Date) of
        true ->
            file:close(OldFile),
            FilePath = make_file(FileName),
            {ok, File} = file:open(FilePath, [append, binary]),
            ok = file:write(File, Line),
            {noreply, State#state{file = File, date = get_date()}};
        false ->
            ok = file:write(OldFile, Line),
            {noreply, State}
    end.

%% @private Ignores stray messages.
handle_info(_Info, State = #state{}) ->
    {noreply, State}.

%% @private
%% NOTE(review): the open file descriptor is not closed here; the VM
%% reclaims it when the process dies, so this is harmless but could be
%% made explicit with file:close/1.
terminate(_Reason, _State = #state{}) ->
    ok.

%% @private
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================

%% Render one payload: binaries pass through; a list of binaries is
%% joined with tab characters.
format(Data) when is_binary(Data) ->
    iolist_to_binary(Data);
format(Items) when is_list(Items) ->
    iolist_to_binary(lists:join(<<"\t">>, Items)).

%% "[YYYY-MM-DD HH:MM:SS]" prefix in local time.
time_prefix() ->
    {{Y, M, D}, {H, I, S}} = calendar:local_time(),
    iolist_to_binary(io_lib:format("[~b-~2..0b-~2..0b ~2..0b:~2..0b:~2..0b]", [Y, M, D, H, I, S])).

%% Full path of today's file: <code root>/log/<LogFile>.<YYYYMMDD>
%% (erlang:date/0 is local time, consistent with get_date/0).
-spec make_file(LogFile :: string()) -> string().
make_file(LogFile) when is_list(LogFile) ->
    {Year, Month, Day} = erlang:date(),
    Suffix = io_lib:format("~b~2..0b~2..0b", [Year, Month, Day]),
    RootDir = code:root_dir() ++ "/log/",
    lists:flatten(RootDir ++ LogFile ++ "." ++ Suffix).

%% Make sure the log directory exists. filelib:ensure_dir/1 creates
%% every missing parent of the given path, which fixes the
%% check-then-create race of the previous is_dir/make_dir pair and
%% also handles a missing intermediate directory.
ensure_dir() ->
    RootDir = code:root_dir() ++ "/log/",
    filelib:ensure_dir(RootDir).

%% Current local date.
-spec get_date() -> Date :: calendar:date().
get_date() ->
    {Date, _} = calendar:local_time(),
    Date.

%% True when the local date differs from Date (the date of the open
%% file), i.e. a new log file must be started.
-spec maybe_new_file(Date :: calendar:date()) -> boolean().
maybe_new_file({Y, M, D}) ->
    {{Y0, M0, D0}, _} = calendar:local_time(),
    not (Y =:= Y0 andalso M =:= M0 andalso D =:= D0).
|
||||
148
apps/iot/src/iot_mqtt_subscriber.erl
Normal file
148
apps/iot/src/iot_mqtt_subscriber.erl
Normal file
@ -0,0 +1,148 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Single MQTT subscriber that fans inbound messages out to host
%%% processes.
%%% 1. Cluster deployment must be considered: upstream data may be
%%%    shared across the cluster.
%%% 2. Host processes must not subscribe to topics themselves; that
%%%    would complicate host creation and teardown.
%%% @end
%%% Created : 12. 3月 2023 21:27
%%%-------------------------------------------------------------------
-module(iot_mqtt_subscriber).
-author("aresei").
-include("iot.hrl").

-behaviour(gen_server).

%% API
-export([start_link/0]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

-define(SERVER, ?MODULE).

%% Topics to subscribe to, as {TopicName, QoS} pairs.
-define(Topics,[
    {<<"CET/NX/upload">>, 2}
]).

-record(state, {
    conn_pid :: pid()   % emqtt connection process
}).

%%%===================================================================
%%% API
%%%===================================================================

%% @doc Spawns the server and registers the local name (unique)
-spec(start_link() ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link() ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [], []).

%%%===================================================================
%%% gen_server callbacks
%%%===================================================================

%% @private
%% @doc Initializes the server: connects to the emqx broker and
%% subscribes to all topics in ?Topics. Any connection failure stops
%% the server so the supervisor can retry.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([]) ->
    %% Establish the connection to the emqx broker.
    Opts = iot_config:emqt_opts(<<"host-subscriber">>),
    lager:debug("[opts] is: ~p", [Opts]),
    case emqtt:start_link(Opts) of
        {ok, ConnPid} ->
            %% Listen for every host-related event.
            lager:debug("[iot_mqtt_subscriber] start conntecting, pid: ~p", [ConnPid]),
            {ok, _} = emqtt:connect(ConnPid),
            lager:debug("[iot_mqtt_subscriber] connect success, pid: ~p", [ConnPid]),
            SubscribeResult = emqtt:subscribe(ConnPid, ?Topics),

            lager:debug("[iot_mqtt_subscriber] subscribe topics: ~p, result is: ~p", [?Topics, SubscribeResult]),

            {ok, #state{conn_pid = ConnPid}};
        ignore ->
            lager:debug("[iot_mqtt_subscriber] connect emqx get ignore"),
            {stop, ignore};
        {error, Reason} ->
            lager:debug("[iot_mqtt_subscriber] connect emqx get error: ~p", [Reason]),
            {stop, Reason}
    end.

%% @private
%% @doc Handling call messages (no synchronous API; replies ok).
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Info, _From, State = #state{conn_pid = _ConnPid}) ->
    {reply, ok, State}.

%% @private
%% @doc Handling cast messages (none expected; ignored).
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Request, State = #state{}) ->
    {noreply, State}.

%% @private
%% @doc Handling all non call/cast messages: broker disconnects stop
%% the server; published packets are dispatched; pubacks are logged.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_info({disconnect, ReasonCode, Properties}, State = #state{}) ->
    lager:debug("[iot_mqtt_subscriber] Recv a DISONNECT packet - ReasonCode: ~p, Properties: ~p", [ReasonCode, Properties]),
    {stop, disconnected, State};
%% Messages must be dispatched quickly; JSON decoding of the payload
%% belongs in the host process, not here.
handle_info({publish, #{packet_id := _PacketId, payload := Payload, qos := Qos, topic := Topic}}, State = #state{conn_pid = _ConnPid}) ->
    lager:debug("[iot_mqtt_subscriber] Recv a topic: ~p, publish packet: ~p, qos: ~p", [Topic, Payload, Qos]),
    %% Dispatch the message to the matching host process.
    %% NOTE(review): dispatch is not implemented yet — the payload is
    %% only logged and then dropped.
    {noreply, State};
handle_info({puback, Packet = #{packet_id := _PacketId}}, State = #state{}) ->
    lager:debug("[iot_mqtt_subscriber] receive puback packet: ~p", [Packet]),
    {noreply, State};

handle_info(Info, State = #state{}) ->
    lager:debug("[iot_mqtt_subscriber] get info: ~p", [Info]),
    {noreply, State}.

%% @private
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(Reason, _State = #state{conn_pid = ConnPid}) when is_pid(ConnPid) ->
    %% Unsubscribe from every topic before disconnecting.
    TopicNames = lists:map(fun({Name, _}) -> Name end, ?Topics),
    {ok, _Props, _ReasonCode} = emqtt:unsubscribe(ConnPid, #{}, TopicNames),

    ok = emqtt:disconnect(ConnPid),
    lager:debug("[iot_mqtt_subscriber] terminate with reason: ~p", [Reason]),
    ok;
terminate(Reason, _State) ->
    lager:debug("[iot_mqtt_subscriber] terminate with reason: ~p", [Reason]),
    ok.

%% @private
%% @doc Convert process state when code is changed
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================
|
||||
22
apps/iot/src/iot_observer.erl
Normal file
22
apps/iot/src/iot_observer.erl
Normal file
@ -0,0 +1,22 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author anlicheng
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Convenience wrappers around etop for ad-hoc inspection of the top
%%% 20 processes, sorted by memory or by runtime.
%%% @end
%%% Created : 21. 12月 2023 11:08
%%%-------------------------------------------------------------------
-module(iot_observer).
-author("anlicheng").

%% API
-export([memory_top/1, cpu_top/1, stop/0]).

%% @doc Start a text etop sorted by memory, refreshing every Interval
%% seconds. Returns the pid of the spawned runner.
memory_top(Interval) when is_integer(Interval) ->
    start_top(memory, Interval).

%% @doc Start a text etop sorted by runtime (CPU), refreshing every
%% Interval seconds. Returns the pid of the spawned runner.
cpu_top(Interval) when is_integer(Interval) ->
    start_top(runtime, Interval).

%% @doc Stop a running etop session.
stop() ->
    etop:stop().

%% Run etop in its own process so the caller's shell is not blocked.
start_top(SortKey, Interval) ->
    Opts = [{output, text}, {interval, Interval}, {lines, 20}, {sort, SortKey}],
    spawn(fun() -> etop:start(Opts) end).
|
||||
26
apps/iot/src/iot_router.erl
Normal file
26
apps/iot/src/iot_router.erl
Normal file
@ -0,0 +1,26 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Routes measured fields of a terminal device to the north-bound
%%% endpoint, using the device's location code stored in redis.
%%% @end
%%% Created : 04. 7月 2023 11:30
%%%-------------------------------------------------------------------
-module(iot_router).
-author("aresei").
-include("iot.hrl").

%% API
-export([route_uuid/3]).

%% @doc Look up the location code for the device RouterUUID in redis
%% and forward Fields/Timestamp to iot_zd_endpoint. Missing mappings
%% and redis errors are logged at warning level and otherwise dropped.
-spec route_uuid(RouterUUID :: binary(), Fields :: list(), Timestamp :: integer()) -> no_return().
route_uuid(RouterUUID, Fields, Timestamp) when is_binary(RouterUUID), is_list(Fields), is_integer(Timestamp) ->
    %% Resolve the terminal device's location code.
    case redis_client:hget(RouterUUID, <<"location_code">>) of
        {ok, LocationCode} when is_binary(LocationCode) ->
            iot_zd_endpoint:forward(LocationCode, Fields, Timestamp);
        {ok, undefined} ->
            lager:warning("[iot_host] the north_data hget location_code, uuid: ~p, not found, fields: ~p", [RouterUUID, Fields]);
        {error, Reason} ->
            lager:warning("[iot_host] the north_data hget location_code uuid: ~p, get error: ~p, fields: ~p", [RouterUUID, Reason, Fields])
    end.
|
||||
49
apps/iot/src/iot_sup.erl
Normal file
49
apps/iot/src/iot_sup.erl
Normal file
@ -0,0 +1,49 @@
|
||||
%%%-------------------------------------------------------------------
%% @doc iot top level supervisor: starts the configured worker pools
%% plus the north-bound endpoint worker.
%% @end
%%%-------------------------------------------------------------------

-module(iot_sup).

-behaviour(supervisor).

-export([start_link/0]).

-export([init/1]).

-define(SERVER, ?MODULE).

%% @doc Start and locally register the top-level supervisor.
start_link() ->
    supervisor:start_link({local, ?SERVER}, ?MODULE, []).

%% sup_flags() = #{strategy => strategy(),         % optional
%%                 intensity => non_neg_integer(), % optional
%%                 period => pos_integer()}        % optional
%% child_spec() = #{id => child_id(),       % mandatory
%%                  start => mfargs(),      % mandatory
%%                  restart => restart(),   % optional
%%                  shutdown => shutdown(), % optional
%%                  type => worker(),       % optional
%%                  modules => modules()}   % optional
init([]) ->
    SupFlags = #{strategy => one_for_one, intensity => 1000, period => 3600},
    EndpointSpec = #{
        id => iot_zd_endpoint,
        start => {iot_zd_endpoint, start_link, []},
        restart => permanent,
        shutdown => 2000,
        type => worker,
        modules => [iot_zd_endpoint]
    },
    %% Pool children come first, then the endpoint worker.
    {ok, {SupFlags, pools() ++ [EndpointSpec]}}.

%% internal functions

%% Build one poolboy child spec per pool configured under the `pools`
%% application environment key ({Name, PoolArgs, WorkerArgs} triples);
%% each pool is locally registered under its own name.
pools() ->
    {ok, PoolConfigs} = application:get_env(iot, pools),
    [poolboy:child_spec(Name, [{name, {local, Name}} | PoolArgs], WorkerArgs)
     || {Name, PoolArgs, WorkerArgs} <- PoolConfigs].
|
||||
26
apps/iot/src/iot_udp_handler.erl
Normal file
26
apps/iot/src/iot_udp_handler.erl
Normal file
@ -0,0 +1,26 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Per-peer UDP handler: decodes length-prefixed host-UUID heartbeat
%%% datagrams and forwards them to the matching host process.
%%% @end
%%% Created : 31. 8月 2023 13:48
%%%-------------------------------------------------------------------
-module(iot_udp_handler).
-author("aresei").

%% API
-export([start_link/2, loop/2]).

%% @doc Spawn a linked handler process for one Transport/Peer pair.
start_link(Transport, Peer) ->
    {ok, spawn_link(?MODULE, loop, [Transport, Peer])}.

%% @doc Receive loop. A well-formed datagram is <<Len:16, UUID:Len/binary>>;
%% anything else terminates the handler normally.
loop(Transport = {udp, Server, _Sock}, Peer) ->
    receive
        {datagram, Server, <<Len:16, HostUUID:Len/binary>>} ->
            %% Fix: iot_host:get_pid/1 can return undefined for an
            %% unknown/offline host (see iot_host_sup:ensured_host_started/1);
            %% passing that straight to heartbeat/1 crashed the loop.
            %% Drop the heartbeat instead.
            case iot_host:get_pid(HostUUID) of
                Pid when is_pid(Pid) ->
                    iot_host:heartbeat(Pid);
                _NotRunning ->
                    ok
            end,
            loop(Transport, Peer);
        {datagram, Server, _} ->
            exit(normal)
    end.
|
||||
150
apps/iot/src/iot_util.erl
Normal file
150
apps/iot/src/iot_util.erl
Normal file
@ -0,0 +1,150 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author licheng5
%%% @copyright (C) 2020, <COMPANY>
%%% @doc
%%% Assorted helpers: wall-clock time, number formatting, list
%%% chunking, JSON envelopes, random ids, hashing and small assertion
%%% utilities.
%%% @end
%%% Created : 11. 12月 2020 上午10:57
%%%-------------------------------------------------------------------
-module(iot_util).
-author("licheng5").

%% API
-export([timestamp/0, number_format/2, current_time/0, timestamp_of_seconds/0, float_to_binary/2, int_format/2, file_uri/1]).
-export([step/3, chunks/2, rand_bytes/1, uuid/0, md5/1, parse_mapper/1]).
-export([json_data/1, json_error/2]).
-export([queue_limited_in/3, assert_call/2, assert/2]).

%% @doc Wall-clock time in milliseconds.
timestamp() ->
    {Mega, Seconds, Micro} = os:timestamp(),
    (Mega * 1000000 + Seconds) * 1000 + Micro div 1000.

%% @doc Wall-clock time in seconds.
current_time() ->
    {Mega, Seconds, _Micro} = os:timestamp(),
    Mega * 1000000 + Seconds.

%% @doc Wall-clock time in seconds. Kept for backward compatibility;
%% was a verbatim copy of current_time/0, now delegates to it.
timestamp_of_seconds() ->
    current_time().

%% @doc Round a float to Decimals places (compact); integers pass
%% through unchanged.
number_format(Num, _Decimals) when is_integer(Num) ->
    Num;
number_format(Float, Decimals) when is_float(Float) ->
    list_to_float(float_to_list(Float, [{decimals, Decimals}, compact])).

%% @doc Keep at most the first Len decimal digits of Num (truncating
%% from the right); shorter numbers pass through unchanged.
int_format(Num, Len) when is_integer(Num), Len > 0 ->
    S = integer_to_list(Num),
    case length(S) > Len of
        true ->
            list_to_integer(lists:sublist(S, 1, Len));
        false ->
            Num
    end.

%% @doc Split [Start, End) into consecutive {From, To} intervals of
%% width Step; the last interval is clipped to End.
step(Start, End, Step) when is_integer(Start), is_integer(End), is_integer(Step), Start < End, Step > 0 ->
    step(Start, End, Step, []).
step(Start, End, Step, Acc) when Start < End ->
    step(Start + Step, End, Step, [{Start, min(Start + Step, End)} | Acc]);
step(_, _, _, Acc) ->
    lists:reverse(Acc).

%% @doc Partition List into sublists of at most Size elements,
%% preserving order; the last chunk may be shorter.
-spec chunks(list(), integer()) -> [list()].
chunks(List, Size) when is_list(List), is_integer(Size), Size > 0, length(List) =< Size ->
    [List];
chunks(List, Size) when is_list(List), is_integer(Size), Size > 0 ->
    chunks0(List, Size, Size, [], []).
chunks0([], _, _, [], AccTarget) ->
    lists:reverse(AccTarget);
chunks0([], _, _, Target, AccTarget) ->
    lists:reverse([lists:reverse(Target) | AccTarget]);
chunks0(List, Size, 0, Target, AccTarget) ->
    chunks0(List, Size, Size, [], [lists:reverse(Target) | AccTarget]);
chunks0([Hd | Tail], Size, Num, Target, AccTarget) ->
    chunks0(Tail, Size, Num - 1, [Hd | Target], AccTarget).

%% @doc Wrap Data in the standard {"result": ...} success envelope.
json_data(Data) ->
    jiffy:encode(#{
        <<"result">> => Data
    }, [force_utf8]).

%% @doc Build the standard {"error": {"code", "message"}} envelope.
json_error(ErrCode, ErrMessage) when is_integer(ErrCode), is_binary(ErrMessage) ->
    jiffy:encode(#{
        <<"error">> => #{
            <<"code">> => ErrCode,
            <<"message">> => ErrMessage
        }
    }, [force_utf8]).

%% @doc 16-character random lowercase hex id.
uuid() ->
    rand_bytes(16).

%% @doc Size random lowercase hex characters from a CSPRNG.
-spec rand_bytes(Size :: integer()) -> string().
rand_bytes(Size) when is_integer(Size), Size > 0 ->
    %% ceil(Size/2) random bytes yield at least Size hex nibbles.
    Size1 = erlang:ceil(Size / 2),
    Bytes = crypto:strong_rand_bytes(Size1),
    S = lists:flatten([integer_to_list(E, 16) || <<E:4>> <= Bytes]),
    lists:sublist(string:to_lower(S), 1, Size).

%% @doc Enqueue Item on Q, dropping the oldest element when the queue
%% already holds Num elements (bounded queue).
queue_limited_in(Item, Q, Num) when is_integer(Num) ->
    case queue:len(Q) >= Num of
        true ->
            Q1 = queue:drop(Q),
            queue:in(Item, Q1);
        false ->
            queue:in(Item, Q)
    end.

%% @doc Call Fun() only when the first argument is true.
assert_call(true, Fun) ->
    Fun();
assert_call(false, _) ->
    ok.

%% @doc Lowercase hex MD5 digest of Str, as a binary.
-spec md5(Str :: binary()) -> binary().
md5(Str) when is_binary(Str) ->
    list_to_binary(lists:flatten([hex(X) || <<X:4>> <= erlang:md5(Str)])).

%% One hex digit (lowercase) for a nibble.
hex(N) when N < 10 ->
    $0 + N;
hex(N) ->
    $a + (N - 10).

%% @doc Compile a mapper expression (source text of a fun of arity 2
%% or 3) into a callable fun.
%% SECURITY NOTE: this evaluates arbitrary Erlang source via erl_eval;
%% Mapper must come from a trusted source only.
-spec parse_mapper(Mapper :: binary() | string()) -> error | {ok, F :: fun((binary(), any()) -> any())}.
parse_mapper(Mapper) when is_binary(Mapper) ->
    parse_mapper(binary_to_list(Mapper));
parse_mapper(Mapper) when is_list(Mapper) ->
    {ok, Tokens, _} = erl_scan:string(Mapper),
    {ok, ExprList} = erl_parse:parse_exprs(Tokens),
    {value, F, _} = erl_eval:exprs(ExprList, []),
    case is_function(F, 2) orelse is_function(F, 3) of
        true ->
            {ok, F};
        false ->
            error
    end.

%% @doc Render a number as a binary with Decimals places (compact);
%% integers are rendered without a fractional part.
-spec float_to_binary(Num :: number(), integer()) -> binary().
float_to_binary(V, _) when is_integer(V) ->
    integer_to_binary(V);
float_to_binary(V, Decimals) when is_float(V), is_integer(Decimals) ->
    S = float_to_list(V, [{decimals, Decimals}, compact]),
    list_to_binary(S).

%% @doc When the condition is false, call F (if a fun) or throw Msg.
assert(true, _) ->
    ok;
assert(false, F) when is_function(F) ->
    F();
assert(false, Msg) ->
    throw(Msg).

%% @doc Build the public upload URL for a file whose name starts with
%% "YYYY-MM-DD-"; returns error for names without that prefix.
-spec file_uri(Filename :: binary()) -> error | {ok, FileUri :: binary()}.
file_uri(Filename) when is_binary(Filename) ->
    case binary:split(Filename, <<"-">>, [global]) of
        [Year, Month, Day | _] ->
            {ok, <<"https://lgsiot.njau.edu.cn/upload/", Year/binary, $/, Month/binary, $/, Day/binary, $/, Filename/binary>>};
        _ ->
            error
    end.
|
||||
26
apps/iot/src/mnesia/mnesia_id_generator.erl
Normal file
26
apps/iot/src/mnesia/mnesia_id_generator.erl
Normal file
@ -0,0 +1,26 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Monotonic id generator backed by a disc-copy mnesia counter table:
%%% one counter row per client table name.
%%% @end
%%% Created : 04. 7月 2023 12:31
%%%-------------------------------------------------------------------
-module(mnesia_id_generator).
-author("aresei").
-include("iot.hrl").

%% API
-export([next_id/1, create_table/0]).

%% @doc Create the id_generator table (ordered_set, disc copies on
%% this node; rows are #id_generator{} records from iot.hrl).
create_table() ->
    %% id generator table
    mnesia:create_table(id_generator, [
        {attributes, record_info(fields, id_generator)},
        {record_name, id_generator},
        {disc_copies, [node()]},
        {type, ordered_set}
    ]).

%% @doc Atomically bump and return the next id for Tab.
%% dirty_update_counter creates the counter (starting from the
%% increment) when no row for Tab exists yet.
next_id(Tab) when is_atom(Tab) ->
    mnesia:dirty_update_counter(id_generator, Tab, 1).
|
||||
61
apps/iot/src/mnesia/mnesia_queue.erl
Normal file
61
apps/iot/src/mnesia/mnesia_queue.erl
Normal file
@ -0,0 +1,61 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Persistent ordered forwarding queue for north-bound data, backed
%%% by a disc-copy mnesia table keyed by a monotonically increasing id
%%% so cursor iteration follows insertion order.
%%% @end
%%% Created : 26. 7月 2023 10:40
%%%-------------------------------------------------------------------
-module(mnesia_queue).
-author("aresei").
-include("iot.hrl").

-define(TAB_NAME, 'queue_data:zhongdian').

%% API
-export([create_table/0]).
-export([insert/1, delete/1, table_size/0, dirty_fetch_next/1]).

%% @doc Create the forwarding cache table (ordered_set, disc copies
%% on this node; rows are #north_data{} records from iot.hrl).
create_table() ->
    %% data-forwarding cache table
    mnesia:create_table(?TAB_NAME, [
        {attributes, record_info(fields, north_data)},
        {record_name, north_data},
        {disc_copies, [node()]},
        {type, ordered_set}
    ]).

%% @doc Enqueue Item. Its id field is overwritten with the next value
%% from mnesia_id_generator, so iteration order equals insert order.
-spec insert(#north_data{}) -> ok | {error, Reason :: any()}.
insert(Item = #north_data{}) ->
    Id = mnesia_id_generator:next_id(?TAB_NAME),
    NItem = Item#north_data{id = Id},
    case mnesia:transaction(fun() -> mnesia:write(?TAB_NAME, NItem, write) end) of
        {atomic, ok} ->
            ok;
        {aborted, Reason} ->
            {error, Reason}
    end.

%% @doc Remove the row with the given id (a no-op if absent).
-spec delete(Key :: any()) -> ok | {error, Reason :: any()}.
delete(Key) when is_integer(Key) ->
    case mnesia:transaction(fun() -> mnesia:delete(?TAB_NAME, Key, write) end) of
        {atomic, ok} ->
            ok;
        {aborted, Reason} ->
            {error, Reason}
    end.

%% @doc Number of queued rows.
-spec table_size() -> integer().
table_size() ->
    mnesia:table_info(?TAB_NAME, size).

%% @doc Dirty-read the first row whose key follows Cursor; the
%% returned key doubles as the next cursor. '$end_of_table' signals
%% the queue has been drained past Cursor.
-spec dirty_fetch_next(Cursor :: integer()) ->
    {ok, NCursor :: integer(), Item :: any()} | '$end_of_table'.
dirty_fetch_next(Cursor) when is_integer(Cursor) ->
    case mnesia:dirty_next(?TAB_NAME, Cursor) of
        '$end_of_table' ->
            '$end_of_table';
        NextKey ->
            [Item] = mnesia:dirty_read(?TAB_NAME, NextKey),
            {ok, NextKey, Item}
    end.
|
||||
103
apps/iot/src/mnesia/mnesia_totalizator.erl
Normal file
103
apps/iot/src/mnesia/mnesia_totalizator.erl
Normal file
@ -0,0 +1,103 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Per-scene, per-day success/failure counters, stored in a
%%% disc-copy mnesia table keyed by {SceneId, Date}.
%%% @end
%%% Created : 26. 7月 2023 10:40
%%%-------------------------------------------------------------------
-module(mnesia_totalizator).
-author("aresei").
-include("iot.hrl").
-include_lib("stdlib/include/qlc.hrl").

-define(TAB_NAME, totalizator).

%% API
-export([create_table/0]).
-export([increment_success/2, increment_fail/2, delete/2, table_size/0, query/2]).

%% @doc Create the counters table (ordered_set, disc copies on this
%% node; rows are #totalizator{} records from iot.hrl).
create_table() ->
    %% counters table
    mnesia:create_table(?TAB_NAME, [
        {attributes, record_info(fields, totalizator)},
        {record_name, totalizator},
        {disc_copies, [node()]},
        {type, ordered_set}
    ]).

%% @doc For each Date, collect one map per SceneId with its success
%% and failure counts; scenes without a row report zero counts.
-spec query(SceneIds :: [integer()], Dates :: [calendar:date()]) -> [map()].
query(SceneIds, Dates) when is_list(SceneIds), is_list(Dates) ->
    lists:map(fun(Date) ->
        Scenes = lists:map(fun(SceneId) ->
            Key = {SceneId, Date},
            case mnesia:dirty_read(?TAB_NAME, Key) of
                [R | _] ->
                    to_map(R);
                [] ->
                    #{<<"scene_id">> => SceneId, <<"success_num">> => 0, <<"fail_num">> => 0}
            end
        end, SceneIds),
        #{<<"date">> => format_date(Date), <<"scenes">> => Scenes}
    end, Dates).

%% @doc Add IncNum to today's success counter for SceneId.
-spec increment_success(SceneId :: integer(), IncNum :: integer()) -> ok | {error, Reason :: any()}.
increment_success(SceneId, IncNum) when is_integer(SceneId), is_integer(IncNum) ->
    increment(SceneId, success, IncNum).

%% @doc Add IncNum to today's failure counter for SceneId.
-spec increment_fail(SceneId :: integer(), IncNum :: integer()) -> ok | {error, Reason :: any()}.
increment_fail(SceneId, IncNum) when is_integer(SceneId), is_integer(IncNum) ->
    increment(SceneId, fail, IncNum).

%% Transactionally bump one counter for {SceneId, today}. Reads the
%% existing row inside the transaction and rewrites it; creates a
%% fresh row when none exists yet.
-spec increment(SceneId :: integer(), Type :: atom(), IncNum :: integer()) -> ok | {error, Reason :: any()}.
increment(SceneId, Type, IncNum) when is_integer(SceneId), is_integer(IncNum), is_atom(Type) ->
    {Date, _} = calendar:local_time(),
    Key = {SceneId, Date},
    Fun = fun() ->
        case mnesia:read(?TAB_NAME, Key) of
            [R = #totalizator{option = Option = #option{success_num = SuccessNum, fail_num = FailNum}} | _] ->
                %% Existing row: bump the selected counter only.
                NOption = case Type of
                    success ->
                        Option#option{success_num = SuccessNum + IncNum};
                    fail ->
                        Option#option{fail_num = FailNum + IncNum}
                end,
                NR = R#totalizator{option = NOption},
                mnesia:write(?TAB_NAME, NR, write);
            [] ->
                %% First increment for this scene/day: create the row
                %% (the other counter defaults per #option{} in iot.hrl).
                Option = case Type of
                    success ->
                        #option{success_num = IncNum};
                    fail ->
                        #option{fail_num = IncNum}
                end,
                R = #totalizator{key = Key, scene_id = SceneId, date = Date, option = Option},
                mnesia:write(?TAB_NAME, R, write)
        end
    end,

    case mnesia:transaction(Fun) of
        {atomic, ok} ->
            ok;
        {aborted, Reason} ->
            {error, Reason}
    end.

%% @doc Delete the counter row for SceneId on Date (no-op if absent).
-spec delete(SceneId :: integer(), Date :: calendar:date()) -> ok | {error, Reason :: any()}.
delete(SceneId, Date) when is_integer(SceneId), is_tuple(Date) ->
    case mnesia:transaction(fun() -> mnesia:delete(?TAB_NAME, {SceneId, Date}, write) end) of
        {atomic, ok} ->
            ok;
        {aborted, Reason} ->
            {error, Reason}
    end.

%% @doc Number of counter rows.
-spec table_size() -> integer().
table_size() ->
    mnesia:table_info(?TAB_NAME, size).

%% Row -> external map shape used by query/2.
to_map(#totalizator{scene_id = SceneId, option = #option{success_num = SuccessNum, fail_num = FailNum}}) ->
    #{<<"scene_id">> => SceneId, <<"success_num">> => SuccessNum, <<"fail_num">> => FailNum}.

%% {Y,M,D} -> <<"YYYY-MM-DD">>.
format_date({Year, Month, Day}) ->
    iolist_to_binary(io_lib:format("~b-~2..0b-~2..0b", [Year, Month, Day])).
|
||||
25
apps/iot/src/mocker/eval_test.erl
Normal file
25
apps/iot/src/mocker/eval_test.erl
Normal file
@ -0,0 +1,25 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Scratch module: evaluates an Erlang fun read from /tmp/test.erl and
%%% applies it to a sample map.
%%% @end
%%% Created : 17. 7月 2023 15:11
%%%-------------------------------------------------------------------
-module(eval_test).
-author("aresei").

%% API
-export([test/0]).

%% Read /tmp/test.erl, scan and parse it as a sequence of expressions,
%% evaluate them (the result is expected to be a fun), then invoke that
%% fun with a sample argument map.
test() ->
    {ok, Source} = file:read_file("/tmp/test.erl"),
    {ok, Tokens, _EndLocation} = erl_scan:string(binary_to_list(Source)),
    {ok, Exprs} = erl_parse:parse_exprs(Tokens),
    {value, Fun, _Bindings} = erl_eval:exprs(Exprs, []),
    Fun(#{name => <<"test">>}).
|
||||
|
||||
|
||||
87
apps/iot/src/mocker/iot_mock.erl
Normal file
87
apps/iot/src/mocker/iot_mock.erl
Normal file
@ -0,0 +1,87 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 14. 2月 2023 20:32
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(iot_mock).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([rsa_encode/1]).
|
||||
-export([insert_services/1]).
|
||||
-export([test_mqtt/0, test_influxdb/0]).
|
||||
|
||||
%% Write 100 sample measurement points to InfluxDB through the
%% influx_pool connection pool (manual smoke test).
test_influxdb() ->
    DeviceUuid = <<"device123123">>,
    WriteOne = fun(N) ->
        Point = influx_point:new(<<"shui_biao">>,
                                 [{<<"uuid">>, DeviceUuid}, {<<"service_name">>, <<"shui_biao">>}],
                                 [{<<"cost">>, N}],
                                 iot_util:timestamp()),
        poolboy:transaction(influx_pool, fun(Conn) ->
            influx_client:write(Conn, <<"iot">>, <<"iot">>, [Point])
        end)
    end,
    lists:foreach(WriteOne, lists:seq(1, 100)).
|
||||
|
||||
%% Push a sample key/value payload through the MQTT endpoint
%% (manual smoke test).
test_mqtt() ->
    Payload = [
        #{<<"key">> => <<"name">>, <<"value">> => <<"anlicheng">>},
        #{<<"key">> => <<"age">>, <<"value">> => 30},
        #{<<"key">> => <<"flow">>, <<"value">> => 30}
    ],
    iot_zd_endpoint:forward(<<"location_code_test123">>, Payload, iot_util:timestamp_of_seconds()).
|
||||
|
||||
%% Insert Num dummy rows into the micro_service table (test fixture
%% data); each insert's result is only logged.
insert_services(Num) ->
    InsertOne = fun(Id) ->
        Row = #{
            <<"name">> => <<"微服务"/utf8, (integer_to_binary(Id))/binary>>,
            <<"code">> => <<"1223423423423423"/utf8>>,
            <<"type">> => 1,
            <<"version">> => <<"v1.0">>,
            <<"url">> => <<"https://www.baidu.com">>,
            <<"detail">> => <<"这是一个关于测试的微服务"/utf8>>
        },
        Res = mysql_pool:insert(mysql_iot, <<"micro_service">>, Row, false),
        lager:debug("insert service result is: ~p", [Res])
    end,
    lists:foreach(InsertOne, lists:seq(1, Num)).
|
||||
|
||||
%% Encrypt Data with the RSA public key at /tmp/keys/public.pem, then
%% round-trip the ciphertext through rsa_decode/1 (debug/demo helper;
%% only logs intermediate values and returns ok).
rsa_encode(Data) when is_binary(Data) ->
    %% Load and decode the PEM-encoded public key.
    PublicPemFile = "/tmp/keys/public.pem",
    {ok, PubPem} = file:read_file(PublicPemFile),
    lager:debug("pub bin is: ~p", [PubPem]),
    [PubEntry] = public_key:pem_decode(PubPem),
    lager:debug("pub pem bin is: ~p", [PubEntry]),
    PubKey = public_key:pem_entry_decode(PubEntry),
    lager:debug("the public key is: ~p", [PubKey]),

    EncData = public_key:encrypt_public(Data, PubKey),
    lager:debug("enc data is: ~p", [EncData]),

    %% Check the ciphertext decrypts with the matching private key.
    rsa_decode(EncData),

    ok.
|
||||
|
||||
%% Decrypt EncData with the RSA private key at /tmp/keys/pri.pem
%% (debug/demo helper; only logs the recovered plaintext).
%% NOTE(review): despite the "pub" wording in the log messages, this
%% path holds the private key consumed by decrypt_private/2 below.
rsa_decode(EncData) when is_binary(EncData) ->
    %% Load and decode the PEM-encoded private key.
    PrivatePemFile = "/tmp/keys/pri.pem",
    {ok, PriPem} = file:read_file(PrivatePemFile),
    lager:debug("pub bin is: ~p", [PriPem]),
    [PriEntry] = public_key:pem_decode(PriPem),
    lager:debug("pub pem bin is: ~p", [PriEntry]),
    PriKey = public_key:pem_entry_decode(PriEntry),
    lager:debug("the public key is: ~p", [PriKey]),

    PlainData = public_key:decrypt_private(EncData, PriKey),
    lager:debug("plain data is: ~p", [PlainData]),

    ok.
|
||||
48
apps/iot/src/mysql/mysql_pool.erl
Normal file
48
apps/iot/src/mysql/mysql_pool.erl
Normal file
@ -0,0 +1,48 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2018, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 29. 九月 2018 17:01
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(mysql_pool).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([get_row/2, get_row/3, get_all/2, get_all/3]).
|
||||
-export([update/4, update_by/2, update_by/3, insert/4]).
|
||||
|
||||
%% Fetch a single row via a pooled connection.
-spec get_row(Pool :: atom(), Sql :: binary()) -> {ok, Record :: map()} | undefined.
get_row(Pool, Sql) when is_atom(Pool), is_binary(Sql) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:get_row(Conn, Sql) end).
|
||||
|
||||
%% Fetch a single row via a pooled connection, with bind parameters.
-spec get_row(Pool :: atom(), Sql :: binary(), Params :: list()) -> {ok, Record :: map()} | undefined.
get_row(Pool, Sql, Params) when is_atom(Pool), is_binary(Sql), is_list(Params) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:get_row(Conn, Sql, Params) end).
|
||||
|
||||
%% Fetch every matching row via a pooled connection.
-spec get_all(Pool :: atom(), Sql :: binary()) -> {ok, Rows :: list()} | {error, Reason :: any()}.
get_all(Pool, Sql) when is_atom(Pool), is_binary(Sql) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:get_all(Conn, Sql) end).
|
||||
|
||||
%% Fetch every matching row via a pooled connection, with bind parameters.
-spec get_all(Pool :: atom(), Sql :: binary(), Params :: list()) -> {ok, Rows :: list()} | {error, Reason :: any()}.
get_all(Pool, Sql, Params) when is_atom(Pool), is_binary(Sql), is_list(Params) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:get_all(Conn, Sql, Params) end).
|
||||
|
||||
%% Insert one row into Table via a pooled connection.
%% Fix: the original guard `is_atom(Pool), is_binary(Table),
%% is_list(Fields); is_map(Fields), is_boolean(FetchInsertId)` parses the
%% `;` as a disjunction of the two comma groups, so list-shaped Fields
%% skipped the FetchInsertId check and map-shaped Fields skipped the
%% Pool/Table checks. All arguments are now validated for both shapes.
-spec insert(Pool :: atom(), Table :: binary(), Fields :: map() | list(), boolean()) ->
    ok | {ok, InsertId :: integer()} | {error, Reason :: any()}.
insert(Pool, Table, Fields, FetchInsertId) when is_atom(Pool), is_binary(Table),
                                                (is_list(Fields) orelse is_map(Fields)),
                                                is_boolean(FetchInsertId) ->
    poolboy:transaction(Pool, fun(ConnPid) -> mysql_provider:insert(ConnPid, Table, Fields, FetchInsertId) end).
|
||||
|
||||
%% Run a raw UPDATE statement via a pooled connection.
-spec update_by(Pool :: atom(), UpdateSql :: binary()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update_by(Pool, UpdateSql) when is_atom(Pool), is_binary(UpdateSql) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:update_by(Conn, UpdateSql) end).
|
||||
|
||||
%% Run a raw UPDATE with bind parameters via a pooled connection.
%% Fix: the guard now also checks is_list(Params), matching the -spec
%% and the sibling get_row/3 and get_all/3 wrappers.
-spec update_by(Pool :: atom(), UpdateSql :: binary(), Params :: list()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update_by(Pool, UpdateSql, Params) when is_atom(Pool), is_binary(UpdateSql), is_list(Params) ->
    poolboy:transaction(Pool, fun(ConnPid) -> mysql_provider:update_by(ConnPid, UpdateSql, Params) end).
|
||||
|
||||
%% Map-driven UPDATE (SET from Fields, WHERE from WhereFields) via a
%% pooled connection.
-spec update(Pool :: atom(), Table :: binary(), Fields :: map(), WhereFields :: map()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update(Pool, Table, Fields, WhereFields) when is_atom(Pool), is_binary(Table), is_map(Fields), is_map(WhereFields) ->
    poolboy:transaction(Pool, fun(Conn) -> mysql_provider:update(Conn, Table, Fields, WhereFields) end).
|
||||
144
apps/iot/src/mysql/mysql_provider.erl
Normal file
144
apps/iot/src/mysql/mysql_provider.erl
Normal file
@ -0,0 +1,144 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2018, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 29. 九月 2018 17:01
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(mysql_provider).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([get_row/2, get_row/3, get_all/2, get_all/3]).
|
||||
-export([update/4, update_by/2, update_by/3, insert/4]).
|
||||
|
||||
%% Fetch the first row of a result set as a map keyed by column name.
%% Returns undefined for an empty result set; query errors are logged
%% and also collapsed to undefined (best-effort read).
-spec get_row(ConnPid :: pid(), Sql :: binary()) -> {ok, Record :: map()} | undefined.
get_row(ConnPid, Sql) when is_pid(ConnPid), is_binary(Sql) ->
    lager:debug("[mysql_client] get_row sql is: ~p", [Sql]),
    case mysql:query(ConnPid, Sql) of
        {ok, Columns, [First | _]} ->
            {ok, maps:from_list(lists:zip(Columns, First))};
        {ok, _Columns, []} ->
            undefined;
        Error ->
            lager:warning("[mysql_client] get error: ~p", [Error]),
            undefined
    end.
|
||||
|
||||
%% Fetch the first row of a parameterized query as a column-keyed map.
%% Returns undefined for an empty result set; errors are logged and
%% collapsed to undefined (best-effort read).
-spec get_row(ConnPid :: pid(), Sql :: binary(), Params :: list()) -> {ok, Record :: map()} | undefined.
get_row(ConnPid, Sql, Params) when is_pid(ConnPid), is_binary(Sql), is_list(Params) ->
    lager:debug("[mysql_client] get_row sql is: ~p, params: ~p", [Sql, Params]),
    case mysql:query(ConnPid, Sql, Params) of
        {ok, Columns, [First | _]} ->
            {ok, maps:from_list(lists:zip(Columns, First))};
        {ok, _Columns, []} ->
            undefined;
        Error ->
            lager:warning("[mysql_client] get error: ~p", [Error]),
            undefined
    end.
|
||||
|
||||
%% Fetch every row of a result set, each as a column-keyed map.
-spec get_all(ConnPid :: pid(), Sql :: binary()) -> {ok, Rows :: list()} | {error, Reason :: any()}.
get_all(ConnPid, Sql) when is_pid(ConnPid), is_binary(Sql) ->
    lager:debug("[mysql_client] get_all sql is: ~p", [Sql]),
    case mysql:query(ConnPid, Sql) of
        {ok, Columns, Rows} ->
            {ok, [maps:from_list(lists:zip(Columns, Row)) || Row <- Rows]};
        {error, Reason} ->
            lager:warning("[mysql_client] get error: ~p", [Reason]),
            {error, Reason}
    end.
|
||||
|
||||
%% Fetch every row of a parameterized query, each as a column-keyed map.
-spec get_all(ConnPid :: pid(), Sql :: binary(), Params :: list()) -> {ok, Rows :: list()} | {error, Reason :: any()}.
get_all(ConnPid, Sql, Params) when is_pid(ConnPid), is_binary(Sql), is_list(Params) ->
    lager:debug("[mysql_client] get_all sql is: ~p, params: ~p", [Sql, Params]),
    case mysql:query(ConnPid, Sql, Params) of
        {ok, Columns, Rows} ->
            {ok, [maps:from_list(lists:zip(Columns, Row)) || Row <- Rows]};
        {error, Reason} ->
            lager:warning("[mysql_client] get error: ~p", [Reason]),
            {error, Reason}
    end.
|
||||
|
||||
%% Build and run an INSERT for Table from a map or [{Key, Value}] list;
%% all values go through bind parameters. With FetchInsertId = true the
%% auto-increment id of the new row is returned.
%% Fix: column names are now wrapped in backticks, consistent with
%% update/4 below; previously a reserved-word column name produced
%% invalid SQL. Keys must be binaries (as update/4 already requires).
-spec insert(ConnPid :: pid(), Table :: binary(), Fields :: map() | list(), boolean()) ->
    ok | {ok, InsertId :: integer()} | {error, Reason :: any()}.
insert(ConnPid, Table, Fields, FetchInsertId) when is_pid(ConnPid), is_binary(Table), is_map(Fields), is_boolean(FetchInsertId) ->
    insert(ConnPid, Table, maps:to_list(Fields), FetchInsertId);
insert(ConnPid, Table, Fields, FetchInsertId) when is_pid(ConnPid), is_binary(Table), is_list(Fields), is_boolean(FetchInsertId) ->
    {Keys, Values} = kvs(Fields),

    %% Quote each column name and build the parallel "?" placeholder list.
    Quoted = lists:map(fun(K) when is_binary(K) -> <<"`", K/binary, "`">> end, Keys),
    FieldSql = iolist_to_binary(lists:join(<<", ">>, Quoted)),
    Placeholders = lists:duplicate(length(Keys), <<"?">>),
    ValuesPlaceholder = iolist_to_binary(lists:join(<<", ">>, Placeholders)),

    Sql = <<"INSERT INTO ", Table/binary, "(", FieldSql/binary, ") VALUES(", ValuesPlaceholder/binary, ")">>,
    lager:debug("[mysql_client] insert sql is: ~p, params: ~p", [Sql, Values]),
    case mysql:query(ConnPid, Sql, Values) of
        ok when FetchInsertId ->
            {ok, mysql:insert_id(ConnPid)};
        ok ->
            ok;
        Error ->
            Error
    end.
|
||||
|
||||
%% Run a raw UPDATE statement and report the affected row count.
-spec update_by(ConnPid :: pid(), UpdateSql :: binary()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update_by(ConnPid, UpdateSql) when is_pid(ConnPid), is_binary(UpdateSql) ->
    lager:debug("[mysql_client] updateBySql sql: ~p", [UpdateSql]),
    case mysql:query(ConnPid, UpdateSql) of
        ok ->
            {ok, mysql:affected_rows(ConnPid)};
        Error ->
            Error
    end.
|
||||
|
||||
%% Run a raw UPDATE with bind parameters and report affected rows.
%% Fix: the guard now also checks is_list(Params), matching the -spec
%% and the other */3 functions in this module.
-spec update_by(ConnPid :: pid(), UpdateSql :: binary(), Params :: list()) -> {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update_by(ConnPid, UpdateSql, Params) when is_pid(ConnPid), is_binary(UpdateSql), is_list(Params) ->
    lager:debug("[mysql_client] updateBySql sql: ~p, params: ~p", [UpdateSql, Params]),
    case mysql:query(ConnPid, UpdateSql, Params) of
        ok ->
            {ok, mysql:affected_rows(ConnPid)};
        Error ->
            Error
    end.
|
||||
|
||||
%% Build and run `UPDATE Table SET ... WHERE ...` from two maps; every
%% value is passed as a bind parameter, keys must be binaries.
%% Fix: the -spec named the second argument Sql although the function
%% receives a table name; it now reads Table :: binary().
%% NOTE(review): an empty WhereFields map yields "... WHERE " with no
%% condition, which the server will reject — confirm callers never
%% pass #{}.
-spec update(ConnPid :: pid(), Table :: binary(), Fields :: map(), WhereFields :: map()) ->
    {ok, AffectedRows :: integer()} | {error, Reason :: any()}.
update(ConnPid, Table, Fields, WhereFields) when is_pid(ConnPid), is_binary(Table), is_map(Fields), is_map(WhereFields) ->
    %% SET clause: `key` = ? pairs joined with commas.
    {SetKeys, SetVals} = kvs(Fields),
    SetParts = lists:map(fun(K) when is_binary(K) -> <<"`", K/binary, "` = ?">> end, SetKeys),
    SetSql = iolist_to_binary(lists:join(<<", ">>, SetParts)),

    %% WHERE clause: `key` = ? pairs joined with AND.
    {WhereKeys, WhereVals} = kvs(WhereFields),
    WhereParts = lists:map(fun(K) when is_binary(K) -> <<"`", K/binary, "` = ?">> end, WhereKeys),
    WhereSql = iolist_to_binary(lists:join(<<" AND ">>, WhereParts)),

    %% Bind parameters in the same order the placeholders appear.
    Params = SetVals ++ WhereVals,

    Sql = <<"UPDATE ", Table/binary, " SET ", SetSql/binary, " WHERE ", WhereSql/binary>>,
    lager:debug("[mysql_client] update sql is: ~p, params: ~p", [Sql, Params]),
    case mysql:query(ConnPid, Sql, Params) of
        ok ->
            {ok, mysql:affected_rows(ConnPid)};
        Error ->
            lager:error("[mysql_client] update sql: ~p, params: ~p, get a error: ~p", [Sql, Params, Error]),
            Error
    end.
|
||||
|
||||
%% Split a map or a [{Key, Value}] list into parallel key and value
%% lists, preserving list order (maps:to_list order for maps).
%% Simplification: the hand-rolled foldl + reverse pair is exactly
%% lists:unzip/1 from the standard library.
-spec kvs(Fields :: map() | list()) -> {Keys :: list(), Values :: list()}.
kvs(Fields) when is_map(Fields) ->
    kvs(maps:to_list(Fields));
kvs(Fields) when is_list(Fields) ->
    lists:unzip(Fields).
|
||||
107
apps/iot/src/postman/broker_postman.erl
Normal file
107
apps/iot/src/postman/broker_postman.erl
Normal file
@ -0,0 +1,107 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Delivery broker backed by a poolboy pool of worker processes, so
%%% post messages can be handled by several processes concurrently.
%%% @end
%%% Created : 07. 8月 2023 10:15
%%%-------------------------------------------------------------------
-module(broker_postman).
-author("aresei").
-include("iot.hrl").

-behaviour(gen_server).

%% API
-export([start_link/3]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

-record(state, {
    %% poolboy pool that owns the worker processes
    pool_pid :: pid()
}).

%%%===================================================================
%%% API
%%%===================================================================

%% @doc Spawns the broker. Mod is the poolboy worker module, WorkerArgs
%% its start argument, PoolSize both the pool size and the max overflow.
-spec(start_link(Mod :: atom(), WorkerArgs :: list(), PoolSize :: integer()) ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Mod, WorkerArgs, PoolSize) when is_atom(Mod), is_list(WorkerArgs), is_integer(PoolSize) ->
    gen_server:start_link(?MODULE, [Mod, WorkerArgs, PoolSize], []).

%%%===================================================================
%%% gen_server callbacks
%%%===================================================================

%% @private
%% @doc Starts the worker pool this broker fans messages out to.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([Mod, WorkerArgs, PoolSize]) ->
    PoolOpts = [{size, PoolSize}, {max_overflow, PoolSize}, {worker_module, Mod}],
    {ok, PoolPid} = poolboy:start_link(PoolOpts, WorkerArgs),
    {ok, #state{pool_pid = PoolPid}}.

%% @private
%% @doc Handling call messages (none expected; replies ok).
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Request, _From, State = #state{}) ->
    {reply, ok, State}.

%% @private
%% @doc Handling cast messages (none expected).
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Request, State = #state{}) ->
    {noreply, State}.

%% @private
%% @doc Forwards {post, ...} messages to a pooled worker; stops the
%% broker (and its pool) on 'stop'.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_info({post, ReceiverPid, PostData}, State = #state{pool_pid = PoolPid}) ->
    %% Check a worker out, hand it the message, check it back in.
    poolboy:transaction(PoolPid, fun(Worker) -> Worker ! {post, ReceiverPid, PostData} end),
    {noreply, State};

handle_info(stop, State = #state{pool_pid = PoolPid}) ->
    %% Best-effort pool shutdown; ignore errors if it is already gone.
    catch poolboy:stop(PoolPid),
    {stop, normal, State}.

%% @private
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(Reason, _State = #state{}) ->
    lager:debug("[broker_postman] terminate with reason: ~p", [Reason]),
    ok.

%% @private
%% @doc Convert process state when code is changed
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================
|
||||
118
apps/iot/src/postman/http_postman.erl
Normal file
118
apps/iot/src/postman/http_postman.erl
Normal file
@ -0,0 +1,118 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Postman worker: delivers post bodies to a fixed HTTP endpoint with
%%% hackney and reports the outcome back to the sender as an ack.
%%% Fix: handle_call/3 used to return {noreply, State}, which left any
%%% gen_server:call/2 caller blocked until timeout; it now replies ok,
%%% matching the sibling postman modules.
%%% @end
%%% Created : 06. 7月 2023 16:23
%%%-------------------------------------------------------------------
-module(http_postman).
-author("aresei").
-include("iot.hrl").

-behaviour(gen_server).

%% API
-export([start_link/1]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

-record(state, {
    %% target URL every post is delivered to
    url :: binary()
}).

%%%===================================================================
%%% API
%%%===================================================================

%% Args is a proplist so the worker is easy to start through poolboy.
%% @doc Spawns the worker (unregistered; intended to run under a pool).
-spec(start_link(Args :: proplists:proplist()) ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Args) when is_list(Args) ->
    gen_server:start_link(?MODULE, [Args], []).

%%%===================================================================
%%% gen_server callbacks
%%%===================================================================

%% @private
%% @doc Initializes the server with the configured endpoint URL.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([Args]) ->
    Url = proplists:get_value(url, Args),
    {ok, #state{url = Url}}.

%% @private
%% @doc Handling call messages (none expected).
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Request, _From, State = #state{}) ->
    %% Fix: was {noreply, State}, which never answered the caller.
    {reply, ok, State}.

%% @private
%% @doc Handling cast messages (none expected).
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Request, State = #state{}) ->
    {noreply, State}.

%% @private
%% @doc POSTs the body to the configured URL and acks the sender:
%% {ack, Id, {ok, Body, RespBody}} on HTTP 200, or
%% {ack, Id, {error, Body, Detail}} on a non-200 status or a transport
%% error.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_info({post, ReceiverPid, #post_data{id = Id, body = Body}}, State = #state{url = Url}) ->
    Headers = [
        {<<"content-type">>, <<"application/json">>}
    ],
    case hackney:request(post, Url, Headers, Body) of
        {ok, 200, _, ClientRef} ->
            {ok, RespBody} = hackney:body(ClientRef),
            hackney:close(ClientRef),
            ReceiverPid ! {ack, Id, {ok, Body, RespBody}},
            {noreply, State};
        {ok, HttpCode, _, ClientRef} ->
            %% Unexpected status: drain and close before acking the error.
            {ok, RespBody} = hackney:body(ClientRef),
            hackney:close(ClientRef),
            ReceiverPid ! {ack, Id, {error, Body, {HttpCode, RespBody}}},
            {noreply, State};
        {error, Reason} ->
            ReceiverPid ! {ack, Id, {error, Body, Reason}},
            {noreply, State}
    end.

%% @private
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(Reason, #state{url = Url}) ->
    lager:debug("[http_postman] url: ~p, terminate with reason: ~p", [Url, Reason]),
    ok.

%% @private
%% @doc Convert process state when code is changed
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================
|
||||
145
apps/iot/src/postman/mqtt_postman.erl
Normal file
145
apps/iot/src/postman/mqtt_postman.erl
Normal file
@ -0,0 +1,145 @@
|
||||
%%%-------------------------------------------------------------------
%%% @author aresei
%%% @copyright (C) 2023, <COMPANY>
%%% @doc
%%% Postman worker that publishes post bodies over MQTT via emqtt.
%%% QoS 0 publishes are acked to the sender immediately; QoS 1/2
%%% publishes are kept in an inflight map until the broker's puback.
%%% Fix: terminate/2 no longer does `ok = emqtt:disconnect(ConnPid)`,
%%% which crashed when the connection was already down — exactly the
%%% path taken when stopping because of a 'disconnected' message.
%%% @end
%%% Created : 12. 3月 2023 21:27
%%%-------------------------------------------------------------------
-module(mqtt_postman).
-author("aresei").
-include("iot.hrl").

-behaviour(gen_server).

%% API
-export([start_link/3]).

%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).

-record(state, {
    conn_pid :: pid(),     %% emqtt connection
    topic :: binary(),     %% topic template; ${location_code} is substituted per post
    qos = 0 :: integer(),  %% publish QoS (0 | 1 | 2)
    inflight = #{}         %% PacketId => {Id, ReceiverPid, Message}, awaiting puback
}).

%%%===================================================================
%%% API
%%%===================================================================

%% @doc Spawns the worker (unregistered). Qos must be 0, 1 or 2.
-spec(start_link(Opts :: list(), Topic :: binary(), Qos :: integer()) ->
    {ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Opts, Topic, Qos) when is_list(Opts), is_binary(Topic), Qos == 0; Qos == 1; Qos == 2 ->
    gen_server:start_link(?MODULE, [Opts, Topic, Qos], []).

%%%===================================================================
%%% gen_server callbacks
%%%===================================================================

%% @private
%% @doc Starts the emqtt client (owned by this process) and connects,
%% failing init if the connect does not succeed within 5 seconds.
-spec(init(Args :: term()) ->
    {ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term()} | ignore).
init([Opts, Topic, Qos]) ->
    Opts1 = [{owner, self()} | Opts],
    {ok, ConnPid} = emqtt:start_link(Opts1),
    lager:debug("[mqtt_postman] start connect, options: ~p", [Opts1]),
    {ok, _} = emqtt:connect(ConnPid, 5000),
    lager:debug("[mqtt_postman] connect success, pid: ~p", [ConnPid]),

    {ok, #state{conn_pid = ConnPid, topic = Topic, qos = Qos}}.

%% @private
%% @doc Handling call messages (none expected; replies ok).
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
    State :: #state{}) ->
    {reply, Reply :: term(), NewState :: #state{}} |
    {reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Info, _From, State) ->
    {reply, ok, State}.

%% @private
%% @doc Handling cast messages (none expected).
-spec(handle_cast(Request :: term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Info, State) ->
    {noreply, State}.

%% @private
%% @doc Handles emqtt notifications (disconnected / publish / puback)
%% and {post, ...} forwarding requests.
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
    {noreply, NewState :: #state{}} |
    {noreply, NewState :: #state{}, timeout() | hibernate} |
    {stop, Reason :: term(), NewState :: #state{}}).
handle_info({disconnected, ReasonCode, Properties}, State = #state{}) ->
    lager:debug("[mqtt_postman] Recv a DISONNECT packet - ReasonCode: ~p, Properties: ~p", [ReasonCode, Properties]),
    {stop, disconnected, State};

handle_info({publish, Message = #{packet_id := _PacketId, payload := Payload}}, State = #state{conn_pid = _ConnPid}) ->
    %% Inbound publishes are only logged; this worker does not subscribe
    %% for application traffic.
    lager:debug("[mqtt_postman] Recv a publish packet: ~p, payload: ~p", [Message, Payload]),
    {noreply, State};

handle_info({puback, #{packet_id := PacketId}}, State = #state{inflight = Inflight}) ->
    %% Broker acknowledged a QoS 1/2 publish: ack the original sender.
    case maps:take(PacketId, Inflight) of
        {{Id, ReceiverPid, AssocMessage}, RestInflight} ->
            ReceiverPid ! {ack, Id, AssocMessage},
            {noreply, State#state{inflight = RestInflight}};
        error ->
            {noreply, State}
    end;

%% Forward a post over MQTT.
handle_info({post, ReceiverPid, #post_data{id = Id, location_code = LocationCode, body = Message}}, State = #state{conn_pid = ConnPid, inflight = InFlight, topic = Topic0, qos = Qos}) ->
    Topic = re:replace(Topic0, <<"\\${location_code}">>, LocationCode, [global, {return, binary}]),
    lager:debug("[mqtt_postman] will publish topic: ~p, message: ~ts, qos: ~p", [Topic, Message, Qos]),
    case emqtt:publish(ConnPid, Topic, #{}, Message, [{qos, Qos}, {retain, true}]) of
        ok ->
            %% QoS 0: no broker ack will come, ack the sender right away.
            ReceiverPid ! {ack, Id, Message},
            {noreply, State};
        {ok, PacketId} ->
            %% QoS 1/2: remember the publish until the puback arrives.
            {noreply, State#state{inflight = maps:put(PacketId, {Id, ReceiverPid, Message}, InFlight)}};
        {error, Reason} ->
            lager:warning("[mqtt_postman] send message to topic: ~p, get error: ~p", [Topic, Reason]),
            {stop, Reason, State}
    end;

handle_info(stop, State) ->
    {stop, normal, State};

handle_info(Info, State = #state{}) ->
    lager:notice("[mqtt_postman] get unknown info: ~p", [Info]),
    {noreply, State}.

%% @private
%% @doc This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
    State :: #state{}) -> term()).
terminate(Reason, #state{conn_pid = ConnPid}) when is_pid(ConnPid) ->
    %% Fix: swallow disconnect errors (the link may already be down when
    %% we stop because of a 'disconnected' message), mirroring the
    %% `catch poolboy:stop/1` pattern used by broker_postman.
    catch emqtt:disconnect(ConnPid),
    lager:debug("[mqtt_postman] terminate with reason: ~p", [Reason]),
    ok;
terminate(Reason, _State) ->
    lager:debug("[mqtt_postman] terminate with reason: ~p", [Reason]),
    ok.

%% @private
%% @doc Convert process state when code is changed
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
    Extra :: term()) ->
    {ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State = #state{}, _Extra) ->
    {ok, State}.

%%%===================================================================
%%% Internal functions
%%%===================================================================
|
||||
116
apps/iot/src/postman/mysql_postman.erl
Normal file
116
apps/iot/src/postman/mysql_postman.erl
Normal file
@ -0,0 +1,116 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2023, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 07. 8月 2023 10:15
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(mysql_postman).
|
||||
-author("aresei").
|
||||
-include("iot.hrl").
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([start_link/1]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
|
||||
|
||||
-define(SERVER, ?MODULE).
|
||||
|
||||
-record(state, {
|
||||
mysql_pid :: pid(),
|
||||
table :: binary()
|
||||
}).
|
||||
|
||||
%%%===================================================================
|
||||
%%% API
|
||||
%%%===================================================================
|
||||
|
||||
%% @doc Spawns the server and registers the local name (unique)
|
||||
-spec(start_link(Args :: list()) ->
|
||||
{ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
|
||||
start_link(Args) when is_list(Args) ->
|
||||
gen_server:start_link(?MODULE, [Args], []).
|
||||
|
||||
%%%===================================================================
|
||||
%%% gen_server callbacks
|
||||
%%%===================================================================
|
||||
|
||||
%% @private
|
||||
%% @doc Initializes the server
|
||||
-spec(init(Args :: term()) ->
|
||||
{ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
|
||||
{stop, Reason :: term()} | ignore).
|
||||
init([Args]) ->
|
||||
MysqlOpts = proplists:get_value(mysql_opts, Args),
|
||||
Table = proplists:get_value(table, Args),
|
||||
|
||||
{ok, ConnPid} = mysql:start_link(MysqlOpts),
|
||||
|
||||
{ok, #state{mysql_pid = ConnPid, table = Table}}.
|
||||
|
||||
%% @private
|
||||
%% @doc Handling call messages
|
||||
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
|
||||
State :: #state{}) ->
|
||||
{reply, Reply :: term(), NewState :: #state{}} |
|
||||
{reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
|
||||
{noreply, NewState :: #state{}} |
|
||||
{noreply, NewState :: #state{}, timeout() | hibernate} |
|
||||
{stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
|
||||
{stop, Reason :: term(), NewState :: #state{}}).
|
||||
handle_call(_Request, _From, State = #state{}) ->
|
||||
{reply, ok, State}.
|
||||
|
||||
%% @private
|
||||
%% @doc Handling cast messages
|
||||
-spec(handle_cast(Request :: term(), State :: #state{}) ->
|
||||
{noreply, NewState :: #state{}} |
|
||||
{noreply, NewState :: #state{}, timeout() | hibernate} |
|
||||
{stop, Reason :: term(), NewState :: #state{}}).
|
||||
handle_cast(_Request, State = #state{}) ->
|
||||
{noreply, State}.
|
||||
|
||||
%% @private
|
||||
%% @doc Handling all non call/cast messages
|
||||
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
|
||||
{noreply, NewState :: #state{}} |
|
||||
{noreply, NewState :: #state{}, timeout() | hibernate} |
|
||||
{stop, Reason :: term(), NewState :: #state{}}).
|
||||
handle_info({post, ReceiverPid, #post_data{id = Id, body = Fields}}, State = #state{mysql_pid = ConnPid, table = Table}) when is_list(Fields) ->
|
||||
case catch mysql_provider:insert(ConnPid, Table, Fields, false) of
|
||||
ok ->
|
||||
ReceiverPid ! {ack, Id};
|
||||
Error ->
|
||||
lager:debug("[mysql_postman] insert table: ~p, res is: ~p", [Table, Error])
|
||||
end,
|
||||
{noreply, State};
|
||||
|
||||
handle_info(stop, State = #state{mysql_pid = ConnPid}) ->
|
||||
mysql:stop(ConnPid),
|
||||
{stop, normal, State}.
|
||||
|
||||
%% @private
|
||||
%% @doc This function is called by a gen_server when it is about to
|
||||
%% terminate. It should be the opposite of Module:init/1 and do any
|
||||
%% necessary cleaning up. When it returns, the gen_server terminates
|
||||
%% with Reason. The return value is ignored.
|
||||
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
|
||||
State :: #state{}) -> term()).
|
||||
terminate(_Reason, _State = #state{}) ->
|
||||
ok.
|
||||
|
||||
%% @private
|
||||
%% @doc Convert process state when code is changed
|
||||
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
|
||||
Extra :: term()) ->
|
||||
{ok, NewState :: #state{}} | {error, Reason :: term()}).
|
||||
code_change(_OldVsn, State = #state{}, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
%%%===================================================================
|
||||
%%% Internal functions
|
||||
%%%===================================================================
|
||||
42
apps/iot/src/redis/redis_client.erl
Executable file
42
apps/iot/src/redis/redis_client.erl
Executable file
@ -0,0 +1,42 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author aresei
|
||||
%%% @copyright (C) 2017, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 21. 四月 2017 13:33
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(redis_client).
|
||||
-author("aresei").
|
||||
|
||||
%% API
|
||||
-export([hget/2, hgetall/1]).
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% HashTable处理
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
-spec hget(Key :: binary(), Field :: binary()) -> {ok, Val :: any()} | {error, Reason :: binary()}.
|
||||
hget(Key, Field) when is_binary(Key), is_binary(Field) ->
|
||||
poolboy:transaction(redis_pool, fun(Conn) -> eredis:q(Conn, ["HGET", Key, Field]) end).
|
||||
|
||||
-spec hgetall(Key :: binary()) -> {ok, Fields :: map()} | {error, Reason :: binary()}.
|
||||
hgetall(Key) when is_binary(Key) ->
|
||||
poolboy:transaction(redis_pool, fun(Conn) ->
|
||||
case eredis:q(Conn, ["HGETALL", Key]) of
|
||||
{ok, Items} ->
|
||||
{ok, to_map(Items)};
|
||||
Error ->
|
||||
Error
|
||||
end
|
||||
end).
|
||||
|
||||
|
||||
to_map(Items) when is_list(Items), length(Items) rem 2 == 0 ->
|
||||
to_map(Items, #{}).
|
||||
to_map([], Target) ->
|
||||
Target;
|
||||
to_map([K, V|Tail], Target) ->
|
||||
to_map(Tail, Target#{K => V}).
|
||||
|
||||
|
||||
174
apps/iot/src/websocket/ws_channel.erl
Normal file
174
apps/iot/src/websocket/ws_channel.erl
Normal file
@ -0,0 +1,174 @@
|
||||
%%%-------------------------------------------------------------------
|
||||
%%% @author licheng5
|
||||
%%% @copyright (C) 2021, <COMPANY>
|
||||
%%% @doc
|
||||
%%%
|
||||
%%% @end
|
||||
%%% Created : 11. 1月 2021 上午12:17
|
||||
%%%-------------------------------------------------------------------
|
||||
-module(ws_channel).
|
||||
-author("licheng5").
|
||||
-include("iot.hrl").
|
||||
|
||||
%% API
|
||||
-export([init/2]).
|
||||
-export([websocket_init/1, websocket_handle/2, websocket_info/2, terminate/3]).
|
||||
-export([publish/3, stop/2, send/2]).
|
||||
|
||||
-record(state, {
|
||||
uuid :: undefined | binary(),
|
||||
%% 用户进程id
|
||||
host_pid = undefined,
|
||||
%% 发送消息对应的id
|
||||
packet_id = 1 :: integer(),
|
||||
|
||||
%% 请求响应的对应关系
|
||||
inflight = #{}
|
||||
}).
|
||||
|
||||
%% 向通道中写入消息
|
||||
-spec publish(Pid :: pid(), ReceiverPid :: pid(), Msg :: binary()) -> Ref :: reference().
|
||||
publish(Pid, ReceiverPid, Msg) when is_pid(Pid), is_binary(Msg) ->
|
||||
Ref = make_ref(),
|
||||
Pid ! {publish, ReceiverPid, Ref, Msg},
|
||||
Ref.
|
||||
|
||||
%% 向通道中写入消息
|
||||
-spec send(Pid :: pid(), Msg :: binary()) -> no_return().
|
||||
send(Pid, Msg) when is_pid(Pid), is_binary(Msg) ->
|
||||
Pid ! {send, Msg}.
|
||||
|
||||
%% 关闭方法
|
||||
-spec stop(Pid :: pid(), Reason :: any()) -> no_return().
|
||||
stop(undefined, _Reason) ->
|
||||
ok;
|
||||
stop(Pid, Reason) when is_pid(Pid) ->
|
||||
Pid ! {stop, Reason}.
|
||||
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
%% 逻辑处理方法
|
||||
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
init(Req, Opts) ->
|
||||
{cowboy_websocket, Req, Opts}.
|
||||
|
||||
websocket_init(_State) ->
|
||||
lager:debug("[ws_channel] get a new connection"),
|
||||
%% 初始状态为true
|
||||
{ok, #state{packet_id = 1}}.
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, PacketId:32, ?METHOD_AUTH:8, Data/binary>>}, State) ->
|
||||
#{<<"uuid">> := UUID, <<"timestamp">> := Timestamp, <<"salt">> := Salt, <<"username">> := Username, <<"token">> := Token} = jiffy:decode(Data, [return_maps]),
|
||||
lager:debug("[ws_channel] auth uuid: ~p, request message: ~p", [UUID, Data]),
|
||||
case iot_auth:check(Username, Token, UUID, Salt, Timestamp) of
|
||||
true ->
|
||||
case host_bo:get_host_by_uuid(UUID) of
|
||||
undefined ->
|
||||
lager:warning("[ws_channel] uuid: ~p, user: ~p, host not found", [UUID, Username]),
|
||||
{stop, State};
|
||||
{ok, _} ->
|
||||
%% 尝试启动主机的服务进程
|
||||
{ok, HostPid} = iot_host_sup:ensured_host_started(UUID),
|
||||
case iot_host:attach_channel(HostPid, self()) of
|
||||
ok ->
|
||||
%% 建立到host的monitor
|
||||
erlang:monitor(process, HostPid),
|
||||
Reply = jiffy:encode(#{<<"code">> => 1, <<"message">> => <<"ok">>}, [force_utf8]),
|
||||
|
||||
{reply, {binary, <<?PACKET_RESPONSE, PacketId:32, 0:8, Reply/binary>>}, State#state{uuid = UUID, host_pid = HostPid}};
|
||||
{error, Reason} ->
|
||||
lager:debug("[ws_channel] uuid: ~p, attach channel get error: ~p", [UUID, Reason]),
|
||||
{stop, State}
|
||||
end
|
||||
end;
|
||||
false ->
|
||||
lager:warning("[ws_channel] uuid: ~p, user: ~p, auth failed", [UUID, Username]),
|
||||
{stop, State}
|
||||
end;
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, PacketId:32, ?METHOD_CREATE_SESSION:8, PubKey/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
{ok, Reply} = iot_host:create_session(HostPid, PubKey),
|
||||
{reply, {binary, <<?PACKET_RESPONSE, PacketId:32, Reply/binary>>}, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_DATA:8, Data/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {data, Data}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_PING:8, CipherMetric/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {ping, CipherMetric}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_INFORM:8, CipherInfo/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {inform, CipherInfo}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_FEEDBACK_STEP:8, CipherInfo/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {feedback_step, CipherInfo}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_FEEDBACK_RESULT:8, CipherInfo/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {feedback_result, CipherInfo}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_EVENT:8, CipherEvent/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {event, CipherEvent}),
|
||||
{ok, State};
|
||||
|
||||
websocket_handle({binary, <<?PACKET_REQUEST, _PacketId:32, ?METHOD_AI_EVENT:8, CipherEvent/binary>>}, State = #state{host_pid = HostPid}) when is_pid(HostPid) ->
|
||||
iot_host:handle(HostPid, {ai_event, CipherEvent}),
|
||||
{ok, State};
|
||||
|
||||
%% 主机端的消息响应
|
||||
websocket_handle({binary, <<?PACKET_PUBLISH_RESPONSE, 0:32, Body/binary>>}, State = #state{uuid = UUID}) ->
|
||||
lager:debug("[ws_channel] uuid: ~p, get send response message: ~p", [UUID, Body]),
|
||||
{ok, State};
|
||||
websocket_handle({binary, <<?PACKET_PUBLISH_RESPONSE, PacketId:32, Body/binary>>}, State = #state{uuid = UUID, inflight = Inflight}) when PacketId > 0 ->
|
||||
lager:debug("[ws_channel] uuid: ~p, get publish response message: ~p, packet_id: ~p", [UUID, Body, PacketId]),
|
||||
case maps:take(PacketId, Inflight) of
|
||||
error ->
|
||||
lager:warning("[ws_channel] get unknown publish response message: ~p, packet_id: ~p", [Body, PacketId]),
|
||||
{ok, State};
|
||||
{{ReceiverPid, Ref}, NInflight} ->
|
||||
case is_pid(ReceiverPid) andalso is_process_alive(ReceiverPid) of
|
||||
true when Body == <<>> ->
|
||||
ReceiverPid ! {ws_response, Ref};
|
||||
true ->
|
||||
ReceiverPid ! {ws_response, Ref, Body};
|
||||
false ->
|
||||
lager:warning("[ws_channel] get publish response message: ~p, packet_id: ~p, but receiver_pid is deaded", [Body, PacketId])
|
||||
end,
|
||||
{ok, State#state{inflight = NInflight}}
|
||||
end;
|
||||
|
||||
websocket_handle(Info, State) ->
|
||||
lager:error("[ws_channel] get a unknown message: ~p, channel will closed", [Info]),
|
||||
{stop, State}.
|
||||
|
||||
%% 处理关闭信号
|
||||
websocket_info({stop, Reason}, State) ->
|
||||
lager:debug("[ws_channel] the channel will be closed with reason: ~p", [Reason]),
|
||||
{stop, State};
|
||||
|
||||
%% 发送消息
|
||||
websocket_info({publish, ReceiverPid, Ref, Msg}, State = #state{packet_id = PacketId, inflight = Inflight}) when is_binary(Msg) ->
|
||||
NInflight = maps:put(PacketId, {ReceiverPid, Ref}, Inflight),
|
||||
{reply, {binary, <<?PACKET_PUBLISH, PacketId:32, Msg/binary>>}, State#state{packet_id = PacketId + 1, inflight = NInflight}};
|
||||
|
||||
%% 发送消息, 不需要等待回复
|
||||
websocket_info({send, Msg}, State) when is_binary(Msg) ->
|
||||
{reply, {binary, <<?PACKET_PUBLISH, 0:32, Msg/binary>>}, State};
|
||||
|
||||
%% 用户进程关闭,则关闭通道
|
||||
websocket_info({'DOWN', _, process, HostPid, Reason}, State = #state{uuid = UUID, host_pid = HostPid}) ->
|
||||
lager:debug("[ws_channel] uuid: ~p, channel will close because host exited with reason: ~p", [UUID, Reason]),
|
||||
{stop, State};
|
||||
|
||||
%% 处理其他未知消息
|
||||
websocket_info(Info, State = #state{uuid = UUID}) ->
|
||||
lager:debug("[ws_channel] channel get unknown info: ~p, uuid: ~p", [Info, UUID]),
|
||||
{ok, State}.
|
||||
|
||||
%% 进程关闭事件
|
||||
terminate(Reason, _Req, State) ->
|
||||
lager:debug("[ws_channel] channel close with reason: ~p, state is: ~p", [Reason, State]),
|
||||
ok.
|
||||
132
config/sys-dev.config
Normal file
132
config/sys-dev.config
Normal file
@ -0,0 +1,132 @@
|
||||
[
|
||||
{iot, [
|
||||
{http_server, [
|
||||
{port, 18080},
|
||||
{acceptors, 500},
|
||||
{max_connections, 10240},
|
||||
{backlog, 10240}
|
||||
]},
|
||||
|
||||
{redis_server, [
|
||||
{port, 16379},
|
||||
{acceptors, 500},
|
||||
{max_connections, 10240},
|
||||
{backlog, 10240}
|
||||
]},
|
||||
|
||||
{udp_server, [
|
||||
{port, 18080}
|
||||
]},
|
||||
|
||||
{api_url, "http://39.98.184.67:8800/api/v1/taskLog"},
|
||||
|
||||
%% 目标服务器地址
|
||||
{emqx_server, [
|
||||
{host, {39, 98, 184, 67}},
|
||||
{port, 1883},
|
||||
{tcp_opts, []},
|
||||
{username, "test"},
|
||||
{password, "test1234"},
|
||||
{keepalive, 86400},
|
||||
{retry_interval, 5}
|
||||
]},
|
||||
|
||||
%% 权限检验时的预埋token
|
||||
{pre_tokens, [
|
||||
{<<"test">>, <<"iot2023">>}
|
||||
]},
|
||||
|
||||
%% 配置中电的数据转发, mqtt协议
|
||||
{zhongdian, [
|
||||
{host, "39.98.184.67"},
|
||||
{port, 1883},
|
||||
{username, "test"},
|
||||
{password, "test1234"},
|
||||
{topic, "CET/NX/upload"},
|
||||
{qos, 2}
|
||||
]},
|
||||
|
||||
%% 金智调度系统
|
||||
{jinzhi, [
|
||||
{pri_key, "jinzhi_pri.key"},
|
||||
{url, "http://172.30.6.177:9080/device/push"},
|
||||
{pool_size, 10}
|
||||
]},
|
||||
|
||||
{pools, [
|
||||
%% mysql连接池配置
|
||||
{mysql_iot,
|
||||
[{size, 10}, {max_overflow, 20}, {worker_module, mysql}],
|
||||
[
|
||||
{host, {39, 98, 184, 67}},
|
||||
{port, 3306},
|
||||
{user, "nannonguser"},
|
||||
{connect_mode, lazy},
|
||||
{keep_alive, true},
|
||||
{password, "nannong@Fe7w"},
|
||||
{database, "nannong"},
|
||||
{queries, [<<"set names utf8">>]}
|
||||
]
|
||||
},
|
||||
|
||||
%% redis连接池
|
||||
{redis_pool,
|
||||
[{size, 10}, {max_overflow, 20}, {worker_module, eredis}],
|
||||
[
|
||||
{host, "39.98.184.67"},
|
||||
{port, 26379},
|
||||
{database, 1}
|
||||
]
|
||||
},
|
||||
|
||||
%% influxdb数据库配置, 测试环境的: 用户名: iot; password: password1234
|
||||
{influx_pool,
|
||||
[{size, 100}, {max_overflow, 200}, {worker_module, influx_client}],
|
||||
[
|
||||
{host, "39.98.184.67"},
|
||||
{port, 8086},
|
||||
{token, <<"IUQ04qecTie7LSuX1EDFBeqspClOdoRBfmXDQxhoEjiJFeW8M-Ui66t981YvviI5qOBpf_ZLgJlBx7nid2lyJQ==">>}
|
||||
]
|
||||
}
|
||||
|
||||
]}
|
||||
|
||||
]},
|
||||
|
||||
|
||||
%% 系统日志配置,系统日志为lager, 支持日志按日期自动分割
|
||||
{lager, [
|
||||
{colored, true},
|
||||
%% Whether to write a crash log, and where. Undefined means no crash logger.
|
||||
{crash_log, "trade_hub.crash.log"},
|
||||
%% Maximum size in bytes of events in the crash log - defaults to 65536
|
||||
{crash_log_msg_size, 65536},
|
||||
%% Maximum size of the crash log in bytes, before its rotated, set
|
||||
%% to 0 to disable rotation - default is 0
|
||||
{crash_log_size, 10485760},
|
||||
%% What time to rotate the crash log - default is no time
|
||||
%% rotation. See the README for a description of this format.
|
||||
{crash_log_date, "$D0"},
|
||||
%% Number of rotated crash logs to keep, 0 means keep only the
|
||||
%% current one - default is 0
|
||||
{crash_log_count, 5},
|
||||
%% Whether to redirect error_logger messages into lager - defaults to true
|
||||
{error_logger_redirect, true},
|
||||
|
||||
%% How big the gen_event mailbox can get before it is switched into sync mode
|
||||
{async_threshold, 20},
|
||||
%% Switch back to async mode, when gen_event mailbox size decrease from `async_threshold'
|
||||
%% to async_threshold - async_threshold_window
|
||||
{async_threshold_window, 5},
|
||||
|
||||
{handlers, [
|
||||
%% debug | info | warning | error, 日志级别
|
||||
{lager_console_backend, debug},
|
||||
{lager_file_backend, [{file, "debug.log"}, {level, debug}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "notice.log"}, {level, notice}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "error.log"}, {level, error}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "info.log"}, {level, info}, {size, 314572800}]}
|
||||
]}
|
||||
]}
|
||||
|
||||
].
|
||||
121
config/sys-prod.config
Normal file
121
config/sys-prod.config
Normal file
@ -0,0 +1,121 @@
|
||||
[
|
||||
{iot, [
|
||||
{http_server, [
|
||||
{port, 18080},
|
||||
{acceptors, 500},
|
||||
{max_connections, 10240},
|
||||
{backlog, 10240}
|
||||
]},
|
||||
|
||||
{redis_server, [
|
||||
{port, 16379},
|
||||
{acceptors, 500},
|
||||
{max_connections, 10240},
|
||||
{backlog, 10240}
|
||||
]},
|
||||
|
||||
{udp_server, [
|
||||
{port, 18080}
|
||||
]},
|
||||
|
||||
%% 权限检验时的预埋token
|
||||
{pre_tokens, [
|
||||
{<<"test">>, <<"iot2023">>}
|
||||
]},
|
||||
|
||||
{api_url, "https://lgsiot.njau.edu.cn/api/v1/taskLog"},
|
||||
|
||||
%% 配置中电的数据转发, mqtt协议
|
||||
{zhongdian, [
|
||||
{host, "172.30.6.161"},
|
||||
{port, 1883},
|
||||
{username, "admin"},
|
||||
{password, "123456"},
|
||||
{topic, "CET/NX/upload"},
|
||||
{qos, 2}
|
||||
]},
|
||||
|
||||
%% 金智调度系统
|
||||
{jinzhi, [
|
||||
{pri_key, "jinzhi_pri.key"},
|
||||
{url, "http://172.30.6.177:9080/device/push"},
|
||||
{pool_size, 10}
|
||||
]},
|
||||
|
||||
{pools, [
|
||||
%% mysql连接池配置
|
||||
{mysql_iot,
|
||||
[{size, 10}, {max_overflow, 20}, {worker_module, mysql}],
|
||||
[
|
||||
{host, {172, 30, 6, 182}},
|
||||
{port, 3306},
|
||||
{user, "nannonguser"},
|
||||
{connect_mode, lazy},
|
||||
{keep_alive, true},
|
||||
{password, "nannong@Fe7w"},
|
||||
{database, "nannong"},
|
||||
{queries, [<<"set names utf8">>]}
|
||||
]
|
||||
},
|
||||
|
||||
%% redis连接池
|
||||
{redis_pool,
|
||||
[{size, 10}, {max_overflow, 20}, {worker_module, eredis}],
|
||||
[
|
||||
{host, "172.19.0.7"},
|
||||
{port, 6379},
|
||||
{database, 1}
|
||||
]
|
||||
},
|
||||
|
||||
%% influxdb数据库配置
|
||||
{influx_pool,
|
||||
[{size, 100}, {max_overflow, 200}, {worker_module, influx_client}],
|
||||
[
|
||||
{host, "172.19.0.4"},
|
||||
{port, 8086},
|
||||
{token, <<"A-ZRjqMK_7NR45lXXEiR7AEtYCd1ETzq9Z61FTMQLb5O4-1hSf8sCrjdPB84e__xsrItKHL3qjJALgbYN-H_VQ==">>}
|
||||
]
|
||||
}
|
||||
|
||||
]}
|
||||
|
||||
]},
|
||||
|
||||
|
||||
%% 系统日志配置,系统日志为lager, 支持日志按日期自动分割
|
||||
{lager, [
|
||||
{colored, true},
|
||||
%% Whether to write a crash log, and where. Undefined means no crash logger.
|
||||
{crash_log, "trade_hub.crash.log"},
|
||||
%% Maximum size in bytes of events in the crash log - defaults to 65536
|
||||
{crash_log_msg_size, 65536},
|
||||
%% Maximum size of the crash log in bytes, before its rotated, set
|
||||
%% to 0 to disable rotation - default is 0
|
||||
{crash_log_size, 10485760},
|
||||
%% What time to rotate the crash log - default is no time
|
||||
%% rotation. See the README for a description of this format.
|
||||
{crash_log_date, "$D0"},
|
||||
%% Number of rotated crash logs to keep, 0 means keep only the
|
||||
%% current one - default is 0
|
||||
{crash_log_count, 5},
|
||||
%% Whether to redirect error_logger messages into lager - defaults to true
|
||||
{error_logger_redirect, true},
|
||||
|
||||
%% How big the gen_event mailbox can get before it is switched into sync mode
|
||||
{async_threshold, 20},
|
||||
%% Switch back to async mode, when gen_event mailbox size decrease from `async_threshold'
|
||||
%% to async_threshold - async_threshold_window
|
||||
{async_threshold_window, 5},
|
||||
|
||||
{handlers, [
|
||||
%% debug | info | warning | error, 日志级别
|
||||
{lager_console_backend, debug},
|
||||
{lager_file_backend, [{file, "debug.log"}, {level, debug}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "notice.log"}, {level, notice}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "error.log"}, {level, error}, {size, 314572800}]},
|
||||
{lager_file_backend, [{file, "info.log"}, {level, info}, {size, 314572800}]}
|
||||
]}
|
||||
]}
|
||||
|
||||
].
|
||||
17
config/vm.args
Normal file
17
config/vm.args
Normal file
@ -0,0 +1,17 @@
|
||||
-sname iot
|
||||
|
||||
-setcookie iot_cookie
|
||||
|
||||
+K true
|
||||
+A30
|
||||
|
||||
-mnesia dir '"/usr/local/var/mnesia/iot"'
|
||||
-mnesia dump_log_write_threshold 50000
|
||||
-mnesia dc_dump_limit 40
|
||||
|
||||
-sbt db
|
||||
|
||||
+K true
|
||||
+A 128
|
||||
+P 1048576
|
||||
+t 10485760
|
||||
14
docker-compose.yml
Normal file
14
docker-compose.yml
Normal file
@ -0,0 +1,14 @@
|
||||
version: '3.6'
|
||||
|
||||
services:
|
||||
iot:
|
||||
container_name: iot
|
||||
image: "iot:1.0"
|
||||
hostname: 'iot'
|
||||
restart: always
|
||||
ports:
|
||||
- 18080:18080/tcp
|
||||
- 16379:16379/tcp
|
||||
volumes:
|
||||
- /var/log/iot/:/data/iot/log/
|
||||
- /usr/local/var/mnesia/iot/:/usr/local/var/mnesia/iot/
|
||||
63
docs/endpoint.md
Normal file
63
docs/endpoint.md
Normal file
@ -0,0 +1,63 @@
|
||||
## Endpoint管理
|
||||
|
||||
### 获取全部的Endpoint
|
||||
|
||||
```html
|
||||
method: GET
|
||||
url: /endpoint/all
|
||||
|
||||
返回数据:
|
||||
[
|
||||
{
|
||||
"name": "名称",
|
||||
"title": "中电集团"
|
||||
"matcher": "匹配的正则表达式",
|
||||
"protocol": "http|https|websocket|mqtt|kafka",
|
||||
"config": "参考config格式说明"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
```
|
||||
|
||||
### 创建Endpoint
|
||||
|
||||
```html
|
||||
method: POST
|
||||
url: /endpoint/create
|
||||
body: (content-type: application/json)
|
||||
{"name": $name, "matcher": $matcher, "title": $title, "protocol": "http|https|websocket|kafka|mqtt", "config": "参考config格式说明"}
|
||||
|
||||
说明:
|
||||
name是唯一的,不同的终端名称代表不同的接受端
|
||||
|
||||
```
|
||||
|
||||
### 删除Endpoint
|
||||
```html
|
||||
method: POST
|
||||
url: /endpoint/delete
|
||||
body: (content-type: application/json)
|
||||
{"name": $name}
|
||||
```
|
||||
|
||||
### config格式说明
|
||||
```html
|
||||
|
||||
http|https
|
||||
{"url": "http(s)://xx.com"}
|
||||
|
||||
websocket
|
||||
{"url": "ws://xx.com/ws"}
|
||||
|
||||
kafka:
|
||||
{"bootstrap_server": ["localhost:9092"], "topic": "test", "username": "test", "password": "password1234"}
|
||||
|
||||
mysql:
|
||||
{"host": "localhost", port: 3306, "username": "test", "password": "test1234", "database": "iot", "table_name": "north_data"}
|
||||
|
||||
mqtt:
|
||||
{"host": "localhost", port: 1883, "username": "test", "password": "test1234", "topic": "CET/NX/${location_code}/upload", "qos": 0|1|2}
|
||||
|
||||
topic中支持预定义变量: ${location_code}; 发送的时候会替换成对应的点位编码
|
||||
```
|
||||
11
docs/heartbeat.md
Normal file
11
docs/heartbeat.md
Normal file
@ -0,0 +1,11 @@
|
||||
## 心跳机制
|
||||
* 边缘主机通过心跳机制来判断主机是否存活(解决弱网环境下websocket链接会经常断开的问题)
|
||||
* 边缘主机每隔5秒发送一次心跳包,服务端每隔2分钟检测一下,判断是否有收到心跳包;如果没有收到则认为主机离线
|
||||
|
||||
### udp服务器
|
||||
* 端口: 18080
|
||||
|
||||
### 心跳包格式
|
||||
* <<Len:2, HostUUID/binary>>
|
||||
* Len表示HostUUID对应的字节数,Len本身占用2字节长度(HostUUID不一定是固定长度,因此需要标注)
|
||||
* 注解:采用这种格式是为了方便后续扩展别的心跳信息字段
|
||||
1213
docs/north_data.md
Normal file
1213
docs/north_data.md
Normal file
File diff suppressed because it is too large
Load Diff
95
docs/publish_command.md
Normal file
95
docs/publish_command.md
Normal file
@ -0,0 +1,95 @@
|
||||
# 命令下发结构
|
||||
|
||||
## 1. 服务器端和边缘主机采用websocket协议通讯
|
||||
|
||||
## 2. 下发的数据格式如下
|
||||
<<T:1byte, Body:任意长度(先json序列化,然后aes加密)>>, 其中
|
||||
|
||||
"t": 1|2|3|4|5,
|
||||
Body:
|
||||
|
||||
```json
|
||||
{
|
||||
// 针对不同的命令类型,这个字段里的`to`和`m`数据有所不同,具体在下面的小节描述
|
||||
// 任务id,服务端在下发数据的时候,需要生成一个唯一的uuid,
|
||||
// 用于标识一个任务
|
||||
"t_id": "任务id",
|
||||
// 表示发给哪个微服务,这里是服务的标识,即服务名称
|
||||
"to": "",
|
||||
// 命令执行的超时时间,单位为秒
|
||||
"t": 10,
|
||||
// 实际内容
|
||||
"m": "$bytes",
|
||||
}
|
||||
```
|
||||
## 3. 加密前的消息结构如下:
|
||||
消息类型,目前支持四种消息类型:
|
||||
* 1代表参数下发,就是向该设备端的微服务发送消息,该消息会辗转发送给微服务进行处理,比如,设置modbus微服务的波特率等消息
|
||||
* 2代表采集向下发,比如,设置某个设备短上的modbus微服务采集某个地址的数据
|
||||
* 3代表下发微服务文件。
|
||||
* 4代表下发场景,这个指令用于设置设备端上各个微服务之间的逐句流转。
|
||||
* 5代表删除场景
|
||||
|
||||
### 3.1 参数下发的结构
|
||||
对于参数下发,下发内容中的m为一个`map[string]interface{}`结构,用于向某个微服务发送参数,具体参数内容由微服务的参数配置提供。
|
||||
|
||||
### 3.2 微服务的启动和停止
|
||||
微服务的启动和停止由内置服务`service-monitor`管理,所以,实际启动和停止,只需要给该服务发送参数就行,其他流程(返回的step和result等)保持一致。实际下发的结构为:
|
||||
|
||||
```json
|
||||
{
|
||||
// 针对不同的命令类型,这个字段里的`to`和`m`数据有所不同,具体在下面的小节描述
|
||||
// 任务id,服务端在下发数据的时候,需要生成一个唯一的uuid,
|
||||
// 用于标识一个任务
|
||||
"t_id": "任务id",
|
||||
// 表示发给哪个微服务,启动和停止,都是发给内置服务service-monitor
|
||||
"to": "service-monitor",
|
||||
// 命令执行的超时时间,单位为秒
|
||||
"t": 10,
|
||||
// 实际内容
|
||||
"m": {
|
||||
"service_name": "需要启动或者停止的服务名, ${name}${copy}-${version}的格式",
|
||||
"action": "start|stop",
|
||||
"command": "如果是start,则需要传递启动命令,启动命令由config.yaml配置文件的boot字段指定"
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
### 3.3 采集项下发的结构
|
||||
采集项下发时,下发内容中的m为一个`[]map[string]interface{}`结构的列表,每一个条目是一个采集项内容,具体采集向内容由微服务的采集项配置提供。
|
||||
|
||||
### 3.4 场景下发的结构
|
||||
在场景下发中,`to`字段会被忽略,可以填写空字符串,而m字段为json化之后的数据,json化之前结构如下:
|
||||
|
||||
```json
|
||||
{
|
||||
"scene_id": “场景的uuid”,
|
||||
"scene_name": "场景名称",
|
||||
// 节点列表
|
||||
"v": [{
|
||||
”id“: "节点id",
|
||||
"service_name": "服务名,$name-version的形式",
|
||||
"real_service": "实际服务,$name$copy-$version的形式",
|
||||
"url": "服务下载url",
|
||||
"md5": "服务的md5值",
|
||||
"props": "props",
|
||||
"display_name": "涂上展示的信息"
|
||||
}]
|
||||
// 连线列表
|
||||
”e“: [{
|
||||
"from": 出节点的id,
|
||||
"to": 入节点的id,
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
### 3.5 删除场景
|
||||
在场景删除中,`to`字段会被忽略,可以填写空字符串,而m字段为json化之后的数据,json化之前结构如下:
|
||||
|
||||
```json
|
||||
{
|
||||
"scene_id": "scene_id"
|
||||
}
|
||||
```
|
||||
```ssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssssss
|
||||
3
docs/router.md
Normal file
3
docs/router.md
Normal file
@ -0,0 +1,3 @@
|
||||
# 数据路由设计文档
|
||||
|
||||
|
||||
267
docs/websocket.md
Normal file
267
docs/websocket.md
Normal file
@ -0,0 +1,267 @@
|
||||
# websocket通讯格式逻辑说明
|
||||
|
||||
## 消息体以下格式
|
||||
1. 主机发送的请求 (0x01)
|
||||
2. 服务端对主机请求的响应 (0x02)
|
||||
|
||||
3. 服务端对主机的消息推送 (0x03)
|
||||
4. 主机对服务器推送消息的响应 (0x04)
|
||||
|
||||
5. 服务器端对主机推送的指令 (0x05)
|
||||
5. 主机对服务器推送的指令的响应 (0x06)
|
||||
|
||||
## 消息体的格式说明
|
||||
<<消息体类型/1byte, PacketId/4byte, Packet/任意长度>>
|
||||
|
||||
## 特殊说明
|
||||
* 服务器端处理异常时,直接关闭websocket连接
|
||||
|
||||
## 消息类型说明
|
||||
|
||||
### register消息
|
||||
|
||||
#### 请求
|
||||
<<0x01, PacketId:4, Method:1, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值必须大于0;
|
||||
Method: 0x00
|
||||
Body: {uuid: string, salt: string, username: string, token: string}, json序列化后的二级制数据,明文
|
||||
|
||||
### 响应
|
||||
<<0x02, PacketId:4, Reply>>
|
||||
Reply: {code: 1, message: "ok"}
|
||||
|
||||
### create_session消息
|
||||
|
||||
#### 请求
|
||||
<<0x01, PacketId:4, 0x01, PubKey:任意长度(公钥信息)>>
|
||||
|
||||
PacketId: 4字节整数, 值必须大于0;
|
||||
|
||||
#### 响应
|
||||
<<0x02, PacketId:4, Reply>>
|
||||
Reply: {a: bool, aes: "服务器生成的aes的值"}
|
||||
|
||||
### data数据上传(无响应)
|
||||
<<0x01, PacketId:4, 0x02, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body:
|
||||
```text
|
||||
{
|
||||
"service_name": "从该设备端的哪个服务采集的数据",
|
||||
// 如果为空,就表明是微服务产生的数据,如果有值,表示是设备产生的数据
|
||||
"device_id": $uuid 非设备产生的device_id为空
|
||||
"at": int, 精确到毫秒
|
||||
// 该微服务采集的数据,是一个包含map的列表类型,map的内容可以由微服务自己指定
|
||||
// 目前一般的格式是"metric-name": $value样式的数据
|
||||
"fields": [
|
||||
{
|
||||
"key": "test"
|
||||
"value": 124,
|
||||
"unit": "U",
|
||||
"type": "AI:遥测值,DI:遥信值,SOE:事件",
|
||||
"timestamp": int
|
||||
}
|
||||
],
|
||||
// 微服务自身可以生成tag,用于微服务指定自己的一些性质,目前使用得不多,以后可以扩展,
|
||||
// 是一个map[string]string类型的数据
|
||||
"tags": {
|
||||
"tag1": "value1",
|
||||
"tag2", "value2"
|
||||
}
|
||||
// todo 在insert数据到influxdb的时候需要增加service_name + host_uuid
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
### ping数据上传(无响应)
|
||||
<<0x01, PacketId:4, 0x03, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body:
|
||||
```text
|
||||
{
|
||||
// 硬件信息,目前有剩余内存,剩余磁盘和cpu负载
|
||||
// 剩余内存,单位为mb
|
||||
|
||||
"memory": {
|
||||
"total": 1024, // 内存数,mb
|
||||
"used": "$int" // 剩余内存数
|
||||
},
|
||||
"disk": {
|
||||
"total": 1024, // 硬盘容量GB
|
||||
"used": "$int" // 剩余硬盘内容GB
|
||||
},
|
||||
|
||||
"cpu_load": $float, // 浮点数
|
||||
"cpu_temperature": $float // 稳定信息
|
||||
"cpu_core": $int,
|
||||
|
||||
"boot_time": 2000, // 启动时间
|
||||
"efka_version": "1.0.0", // 客户端版本
|
||||
"kernel_arch": "arm64", // 客户端硬件架构
|
||||
"province": "", // 所在省
|
||||
"city": "", // 所在市
|
||||
"adcode": 100, // 所在城市的编号
|
||||
"ips": [
|
||||
"ip地址1",
|
||||
"ip地址2"
|
||||
],
|
||||
// 接口信息
|
||||
"interfaces": [
|
||||
{
|
||||
"status": 0|1, "接口状态,0离线,1在线"
|
||||
"name": "接口名称",
|
||||
"desc": "接口描述",
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### inform数据上传(无响应)
|
||||
<<0x01, PacketId:4, 0x04, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body:
|
||||
```text
|
||||
{
|
||||
"at": $int64,
|
||||
// 微服务信息
|
||||
"services": [{
|
||||
"scene_id": $int "场景的编号",
|
||||
"name": "微服务名称",
|
||||
"version": "微服务版本",
|
||||
"version_copy": "微服务副本",
|
||||
// 微服务是否在线,0表示离线,1表示在线
|
||||
"status": 0|1
|
||||
}]
|
||||
}
|
||||
```
|
||||
|
||||
### feedback_step数据上传(无响应)
|
||||
<<0x01, PacketId:4, 0x05, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body:
|
||||
```text
|
||||
{
|
||||
"task_id": "任务的task id",
|
||||
// sc为step code,具体地:
|
||||
// 0代表该任务开始了,服务端创建该任务之后,是这个代码
|
||||
// 1代表任务被分发了,服务端向nats(mqtt)发送消息之后,是这个代码
|
||||
// 2代表任务被设备端接收到了
|
||||
// 3代表该任务已经被发送给微服务进行处理了
|
||||
// 4代表该任务已经被微服务收到了,微服务正在处理
|
||||
// 5代表任务已经完成,微服务已经处理完成。
|
||||
"code": $int
|
||||
}
|
||||
```
|
||||
|
||||
### feedback_result数据上传(无响应)
|
||||
<<0x01, PacketId:4, 0x06, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body:
|
||||
|
||||
```text
|
||||
{
|
||||
"task_id": "任务id",
|
||||
// unix nano类型
|
||||
"time": $int,
|
||||
// 返回的结果码,0代表成功,其他代表出错
|
||||
"code": $int,
|
||||
"reason": "任务执行的结果",
|
||||
"error": "错误消息,当c为非0时,这个字段会表示出错消息",
|
||||
// 返回任务类型,1表示任务是微服务下发,0代表是命令下发
|
||||
"type": 0 | 1,
|
||||
}
|
||||
```
|
||||
|
||||
### 主机上传终端设备的相关事件
|
||||
<<0x01, PacketId:4, 0x07, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body: 事件内容,AES加密
|
||||
|
||||
```text
|
||||
|
||||
设备的离在线状态
|
||||
|
||||
{
|
||||
"event_type": 1,
|
||||
"params": {
|
||||
"device_uuid": "",
|
||||
"status": 0 // 1在线, 0离线
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
### 主机上传AI事件
|
||||
|
||||
<<0x01, PacketId:4, 0x08, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数, 值为0;
|
||||
Body: 事件内容,AES加密
|
||||
|
||||
```text
|
||||
|
||||
AI事件上报示例
|
||||
|
||||
{
|
||||
"event_type": 1, // 事件类型
|
||||
"params": {
|
||||
"device_uuid": "",
|
||||
"description": "垃圾溢满",
|
||||
"datetime": "2023-06-10 12:00:00",
|
||||
"event_code": "事件编码,采集项下发的事件编码: 5位事件编码", //1. 异物占道(异物识别) 20405 2.垃圾溢满 20453
|
||||
"attachments": [
|
||||
{
|
||||
"name": "垃圾溢满",
|
||||
"filename": "2023-12-10-xyz.hdc"
|
||||
},
|
||||
{
|
||||
"name": "垃圾溢满",
|
||||
"filename": "2023-12-10-xyz.hdc"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## 指令说明
|
||||
|
||||
### 指令返回格式说明(按照json_rpc 2.0的规范)
|
||||
|
||||
```text
|
||||
成功: {"result": map | array | string | any}
|
||||
|
||||
失败: {"error": {code: int, message: "错误描述"}}
|
||||
```
|
||||
|
||||
### 服务器对主机推送的指令格式
|
||||
|
||||
<<0x05, PacketId:4, Body:任意长度>>
|
||||
|
||||
PacketId: 4字节整数; 如果值为0,则不需要返回指令执行结果
|
||||
Body: 事件内容,AES加密
|
||||
|
||||
#### 计费电表, 下发指令
|
||||
|
||||
```text
|
||||
{
|
||||
"device_uuid": "xxxxxx", // 设备的device_uuid, 数组格式
|
||||
"version": "1.0",
|
||||
"directive_type": 0x01, // 中电计费电表控制
|
||||
"timeout": 10, // 指令执行超时时间
|
||||
"directive": {
|
||||
"type": "ctrl", // 遥控
|
||||
"stype": int, // 遥控类型,0: 遥控, 1: 遥调, 2: 置数
|
||||
"ctype": int, // 遥控动作, 0: 打开,1: 闭合
|
||||
"value": double, // 控制参数
|
||||
"timestamp": 17031000000 // 发命令时间
|
||||
}
|
||||
}
|
||||
|
||||
58
docs/zhongdian_mqtt.md
Normal file
58
docs/zhongdian_mqtt.md
Normal file
@ -0,0 +1,58 @@
|
||||
# 中电mqtt通讯规约
|
||||
## 服务器地址
|
||||
MQTT服务器IP:172.30.6.161
|
||||
MQTT服务器端口:1883
|
||||
MQTT服务器账号:admin
|
||||
MQTT服务器密码:public
|
||||
|
||||
## topic
|
||||
南向:
|
||||
MQTT ClientID: CET/NX
|
||||
发布Topic:CET/NX/upload
|
||||
订阅Topic:CET/NX/download
|
||||
|
||||
中电:
|
||||
MQTT ClientID:CET/NX
|
||||
订阅Topic:CET/NX/upload
|
||||
发布Topic:CET/NX/download
|
||||
|
||||
## 数据格式
|
||||
```text
|
||||
{
|
||||
"version": "1.0",
|
||||
"location_code": "string",
|
||||
"ts ": 1688606685,
|
||||
"properties": [
|
||||
{
|
||||
"type": "AI",
|
||||
"key": "A相电流",
|
||||
"value": 0.25,
|
||||
"unit": "A",
|
||||
"timestamp": 1688354258
|
||||
},
|
||||
{
|
||||
"type": "AI",
|
||||
"key": "A相电压",
|
||||
"value": 220.5,
|
||||
"unit": "V",
|
||||
"timestamp": 1688354258
|
||||
},
|
||||
{
|
||||
"type": "SOE",
|
||||
"key": "电压越限",
|
||||
"value": 1,
|
||||
"unit": "V",
|
||||
"timestamp": 1688354258
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
南向上送数据
|
||||
|
||||
CET应答:
|
||||
{
|
||||
"location_code": string, //(点位编码信息),
|
||||
"ts ":1688606685 ,
|
||||
"result":1(int)
|
||||
}
|
||||
```
|
||||
49
rebar.config
Normal file
49
rebar.config
Normal file
@ -0,0 +1,49 @@
|
||||
{erl_opts, [debug_info, {parse_transform, lager_transform}]}.
|
||||
{deps, [
|
||||
{poolboy, ".*", {git, "https://github.com/devinus/poolboy.git", {tag, "1.5.1"}}},
|
||||
{hackney, ".*", {git, "https://github.com/benoitc/hackney.git", {tag, "1.16.0"}}},
|
||||
{sync, ".*", {git, "https://github.com/rustyio/sync.git", {branch, "master"}}},
|
||||
{cowboy, ".*", {git, "https://github.com/ninenines/cowboy.git", {tag, "2.10.0"}}},
|
||||
{esockd, ".*", {git, "https://github.com/emqx/esockd.git", {tag, "v5.7.3"}}},
|
||||
{jiffy, ".*", {git, "https://github.com/davisp/jiffy.git", {tag, "1.1.1"}}},
|
||||
{mysql, ".*", {git, "https://github.com/mysql-otp/mysql-otp", {tag, "1.8.0"}}},
|
||||
{eredis, ".*", {git, "https://github.com/wooga/eredis.git", {tag, "v1.2.0"}}},
|
||||
{parse_trans, ".*", {git, "https://github.com/uwiger/parse_trans", {tag, "3.0.0"}}},
|
||||
{lager, ".*", {git,"https://github.com/erlang-lager/lager.git", {tag, "3.9.2"}}}
|
||||
]}.
|
||||
|
||||
{relx, [{release, {iot, "0.1.0"},
|
||||
[iot,
|
||||
sasl]},
|
||||
|
||||
{mode, dev},
|
||||
%{mode, prod},
|
||||
|
||||
%% automatically picked up if the files
|
||||
%% exist but can be set manually, which
|
||||
%% is required if the names aren't exactly
|
||||
%% sys.config and vm.args
|
||||
{sys_config, "./config/sys.config"},
|
||||
{vm_args, "./config/vm.args"}
|
||||
|
||||
%% the .src form of the configuration files do
|
||||
%% not require setting RELX_REPLACE_OS_VARS
|
||||
%% {sys_config_src, "./config/sys.config.src"},
|
||||
%% {vm_args_src, "./config/vm.args.src"}
|
||||
]}.
|
||||
|
||||
{profiles, [{prod, [{relx,
|
||||
[%% prod is the default mode when prod
|
||||
%% profile is used, so does not have
|
||||
%% to be explicitly included like this
|
||||
{mode, prod}
|
||||
|
||||
%% use minimal mode to exclude ERTS
|
||||
%% {mode, minimal}
|
||||
]
|
||||
}]}]}.
|
||||
|
||||
{erl_opts, [debug_info, {parse_transform, lager_transform}]}.
|
||||
|
||||
{rebar_packages_cdn, "https://hexpm.upyun.com"}.
|
||||
|
||||
77
rebar.lock
Normal file
77
rebar.lock
Normal file
@ -0,0 +1,77 @@
|
||||
{"1.2.0",
|
||||
[{<<"certifi">>,{pkg,<<"certifi">>,<<"2.5.2">>},1},
|
||||
{<<"cowboy">>,
|
||||
{git,"https://github.com/ninenines/cowboy.git",
|
||||
{ref,"9e600f6c1df3c440bc196b66ebbc005d70107217"}},
|
||||
0},
|
||||
{<<"cowlib">>,
|
||||
{git,"https://github.com/ninenines/cowlib",
|
||||
{ref,"cc04201c1d0e1d5603cd1cde037ab729b192634c"}},
|
||||
1},
|
||||
{<<"eredis">>,
|
||||
{git,"https://github.com/wooga/eredis.git",
|
||||
{ref,"9ad91f149310a7d002cb966f62b7e2c3330abb04"}},
|
||||
0},
|
||||
{<<"esockd">>,
|
||||
{git,"https://github.com/emqx/esockd.git",
|
||||
{ref,"d9ce4024cc42a65e9a05001997031e743442f955"}},
|
||||
0},
|
||||
{<<"fs">>,{pkg,<<"fs">>,<<"6.1.1">>},1},
|
||||
{<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1},
|
||||
{<<"hackney">>,
|
||||
{git,"https://github.com/benoitc/hackney.git",
|
||||
{ref,"f3e9292db22c807e73f57a8422402d6b423ddf5f"}},
|
||||
0},
|
||||
{<<"idna">>,{pkg,<<"idna">>,<<"6.0.1">>},1},
|
||||
{<<"jiffy">>,
|
||||
{git,"https://github.com/davisp/jiffy.git",
|
||||
{ref,"9ea1b35b6e60ba21dfd4adbd18e7916a831fd7d4"}},
|
||||
0},
|
||||
{<<"lager">>,
|
||||
{git,"https://github.com/erlang-lager/lager.git",
|
||||
{ref,"459a3b2cdd9eadd29e5a7ce5c43932f5ccd6eb88"}},
|
||||
0},
|
||||
{<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},1},
|
||||
{<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},1},
|
||||
{<<"mysql">>,
|
||||
{git,"https://github.com/mysql-otp/mysql-otp",
|
||||
{ref,"caf5ff96c677a8fe0ce6f4082bc036c8fd27dd62"}},
|
||||
0},
|
||||
{<<"parse_trans">>,
|
||||
{git,"https://github.com/uwiger/parse_trans",
|
||||
{ref,"6f3645afb43c7c57d61b54ef59aecab288ce1013"}},
|
||||
0},
|
||||
{<<"poolboy">>,
|
||||
{git,"https://github.com/devinus/poolboy.git",
|
||||
{ref,"3bb48a893ff5598f7c73731ac17545206d259fac"}},
|
||||
0},
|
||||
{<<"ranch">>,
|
||||
{git,"https://github.com/ninenines/ranch",
|
||||
{ref,"a692f44567034dacf5efcaa24a24183788594eb7"}},
|
||||
1},
|
||||
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.6">>},1},
|
||||
{<<"sync">>,
|
||||
{git,"https://github.com/rustyio/sync.git",
|
||||
{ref,"f13e61a79623290219d7c10dff1dd94d91eee963"}},
|
||||
0},
|
||||
{<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.5.0">>},2}]}.
|
||||
[
|
||||
{pkg_hash,[
|
||||
{<<"certifi">>, <<"B7CFEAE9D2ED395695DD8201C57A2D019C0C43ECAF8B8BCB9320B40D6662F340">>},
|
||||
{<<"fs">>, <<"9D147B944D60CFA48A349F12D06C8EE71128F610C90870BDF9A6773206452ED0">>},
|
||||
{<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>},
|
||||
{<<"idna">>, <<"1D038FB2E7668CE41FBF681D2C45902E52B3CB9E9C77B55334353B222C2EE50C">>},
|
||||
{<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
|
||||
{<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
|
||||
{<<"ssl_verify_fun">>, <<"CF344F5692C82D2CD7554F5EC8FD961548D4FD09E7D22F5B62482E5AEAEBD4B0">>},
|
||||
{<<"unicode_util_compat">>, <<"8516502659002CEC19E244EBD90D312183064BE95025A319A6C7E89F4BCCD65B">>}]},
|
||||
{pkg_hash_ext,[
|
||||
{<<"certifi">>, <<"3B3B5F36493004AC3455966991EAF6E768CE9884693D9968055AEEEB1E575040">>},
|
||||
{<<"fs">>, <<"EF94E95FFE79916860649FED80AC62B04C322B0BB70F5128144C026B4D171F8B">>},
|
||||
{<<"goldrush">>, <<"99CB4128CFFCB3227581E5D4D803D5413FA643F4EB96523F77D9E6937D994CEB">>},
|
||||
{<<"idna">>, <<"A02C8A1C4FD601215BB0B0324C8A6986749F807CE35F25449EC9E69758708122">>},
|
||||
{<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
|
||||
{<<"mimerl">>, <<"F278585650AA581986264638EBF698F8BB19DF297F66AD91B18910DFC6E19323">>},
|
||||
{<<"ssl_verify_fun">>, <<"BDB0D2471F453C88FF3908E7686F86F9BE327D065CC1EC16FA4540197EA04680">>},
|
||||
{<<"unicode_util_compat">>, <<"D48D002E15F5CC105A696CF2F1BBB3FC72B4B770A184D8420C8DB20DA2674B38">>}]}
|
||||
].
|
||||
5
run
Executable file
5
run
Executable file
@ -0,0 +1,5 @@
|
||||
#! /bin/sh
|
||||
rebar3 compile
|
||||
rebar3 release
|
||||
|
||||
_build/default/rel/iot/bin/iot console
|
||||
Loading…
x
Reference in New Issue
Block a user