Mirror of https://github.com/chirpstack/chirpstack.git (synced 2025-08-24 08:47:04 +00:00)
Compare commits
22 commits: v4.13.0...blynk_inte
Commits (SHA1):
e56530a620
4b068ea1c3
78e426963e
050c2b37f3
1783a2b97e
97af1a456f
b859a56150
5bdc63ffa7
6200a0c04d
2c66e9bd75
768a1e8f12
e26a6e09f0
7895b7f4dc
b195d28f2d
a9031ed1e2
4ae9b94779
1f2cf6c384
b73723fdb1
74e52d9970
bd50dc7640
20c3334f6b
b821c38cac
Cargo.lock (generated): 1309 changed lines. File diff suppressed because it is too large.
api/go/Makefile (vendored): 1 changed line.
@@ -21,6 +21,7 @@ api:
 	protoc ${PROTOC_ARGS} api/gateway.proto
 	protoc ${PROTOC_ARGS} api/multicast_group.proto
 	protoc ${PROTOC_ARGS} api/relay.proto
+	protoc ${PROTOC_ARGS} api/fuota.proto
 
 integration:
 	protoc ${PROTOC_ARGS} integration/integration.proto
api/go/api/application.pb.go (vendored): 1421 changed lines. File diff suppressed because it is too large.
api/go/api/application_grpc.pb.go (vendored): 320 changed lines.
@@ -42,10 +42,6 @@ const (
 	ApplicationService_GetMyDevicesIntegration_FullMethodName = "/api.ApplicationService/GetMyDevicesIntegration"
 	ApplicationService_UpdateMyDevicesIntegration_FullMethodName = "/api.ApplicationService/UpdateMyDevicesIntegration"
 	ApplicationService_DeleteMyDevicesIntegration_FullMethodName = "/api.ApplicationService/DeleteMyDevicesIntegration"
-	ApplicationService_CreateLoraCloudIntegration_FullMethodName = "/api.ApplicationService/CreateLoraCloudIntegration"
-	ApplicationService_GetLoraCloudIntegration_FullMethodName = "/api.ApplicationService/GetLoraCloudIntegration"
-	ApplicationService_UpdateLoraCloudIntegration_FullMethodName = "/api.ApplicationService/UpdateLoraCloudIntegration"
-	ApplicationService_DeleteLoraCloudIntegration_FullMethodName = "/api.ApplicationService/DeleteLoraCloudIntegration"
 	ApplicationService_CreateGcpPubSubIntegration_FullMethodName = "/api.ApplicationService/CreateGcpPubSubIntegration"
 	ApplicationService_GetGcpPubSubIntegration_FullMethodName = "/api.ApplicationService/GetGcpPubSubIntegration"
 	ApplicationService_UpdateGcpPubSubIntegration_FullMethodName = "/api.ApplicationService/UpdateGcpPubSubIntegration"
@@ -66,6 +62,10 @@ const (
 	ApplicationService_GetIftttIntegration_FullMethodName = "/api.ApplicationService/GetIftttIntegration"
 	ApplicationService_UpdateIftttIntegration_FullMethodName = "/api.ApplicationService/UpdateIftttIntegration"
 	ApplicationService_DeleteIftttIntegration_FullMethodName = "/api.ApplicationService/DeleteIftttIntegration"
+	ApplicationService_CreateBlynkIntegration_FullMethodName = "/api.ApplicationService/CreateBlynkIntegration"
+	ApplicationService_GetBlynkIntegration_FullMethodName = "/api.ApplicationService/GetBlynkIntegration"
+	ApplicationService_UpdateBlynkIntegration_FullMethodName = "/api.ApplicationService/UpdateBlynkIntegration"
+	ApplicationService_DeleteBlynkIntegration_FullMethodName = "/api.ApplicationService/DeleteBlynkIntegration"
 	ApplicationService_GenerateMqttIntegrationClientCertificate_FullMethodName = "/api.ApplicationService/GenerateMqttIntegrationClientCertificate"
 	ApplicationService_ListDeviceProfiles_FullMethodName = "/api.ApplicationService/ListDeviceProfiles"
 	ApplicationService_ListDeviceTags_FullMethodName = "/api.ApplicationService/ListDeviceTags"
@@ -122,14 +122,6 @@ type ApplicationServiceClient interface {
 	UpdateMyDevicesIntegration(ctx context.Context, in *UpdateMyDevicesIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
 	// Delete myDevices integration.
 	DeleteMyDevicesIntegration(ctx context.Context, in *DeleteMyDevicesIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
-	// Create LoRaCloud integration.
-	CreateLoraCloudIntegration(ctx context.Context, in *CreateLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
-	// Get LoRaCloud integration.
-	GetLoraCloudIntegration(ctx context.Context, in *GetLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*GetLoraCloudIntegrationResponse, error)
-	// Update LoRaCloud integration.
-	UpdateLoraCloudIntegration(ctx context.Context, in *UpdateLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
-	// Delete LoRaCloud integration.
-	DeleteLoraCloudIntegration(ctx context.Context, in *DeleteLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
 	// Create GCP Pub/Sub integration.
 	CreateGcpPubSubIntegration(ctx context.Context, in *CreateGcpPubSubIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
 	// Get GCP Pub/Sub integration.
@@ -170,6 +162,14 @@ type ApplicationServiceClient interface {
 	UpdateIftttIntegration(ctx context.Context, in *UpdateIftttIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
 	// Delete IFTTT integration.
 	DeleteIftttIntegration(ctx context.Context, in *DeleteIftttIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+	// Create Blynk integration.
+	CreateBlynkIntegration(ctx context.Context, in *CreateBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+	// Get Blynk integration.
+	GetBlynkIntegration(ctx context.Context, in *GetBlynkIntegrationRequest, opts ...grpc.CallOption) (*GetBlynkIntegrationResponse, error)
+	// Update Blynk integration.
+	UpdateBlynkIntegration(ctx context.Context, in *UpdateBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
+	// Delete Blynk integration.
+	DeleteBlynkIntegration(ctx context.Context, in *DeleteBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
 	// Generates application ID specific client-certificate.
 	GenerateMqttIntegrationClientCertificate(ctx context.Context, in *GenerateMqttIntegrationClientCertificateRequest, opts ...grpc.CallOption) (*GenerateMqttIntegrationClientCertificateResponse, error)
 	// List device-profiles used within the given application.
@@ -406,46 +406,6 @@ func (c *applicationServiceClient) DeleteMyDevicesIntegration(ctx context.Contex
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) CreateLoraCloudIntegration(ctx context.Context, in *CreateLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_CreateLoraCloudIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) GetLoraCloudIntegration(ctx context.Context, in *GetLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*GetLoraCloudIntegrationResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(GetLoraCloudIntegrationResponse)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_GetLoraCloudIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) UpdateLoraCloudIntegration(ctx context.Context, in *UpdateLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_UpdateLoraCloudIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) DeleteLoraCloudIntegration(ctx context.Context, in *DeleteLoraCloudIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_DeleteLoraCloudIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) CreateGcpPubSubIntegration(ctx context.Context, in *CreateGcpPubSubIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
@@ -646,6 +606,46 @@ func (c *applicationServiceClient) DeleteIftttIntegration(ctx context.Context, i
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) CreateBlynkIntegration(ctx context.Context, in *CreateBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_CreateBlynkIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) GetBlynkIntegration(ctx context.Context, in *GetBlynkIntegrationRequest, opts ...grpc.CallOption) (*GetBlynkIntegrationResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(GetBlynkIntegrationResponse)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_GetBlynkIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) UpdateBlynkIntegration(ctx context.Context, in *UpdateBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_UpdateBlynkIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) DeleteBlynkIntegration(ctx context.Context, in *DeleteBlynkIntegrationRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, ApplicationService_DeleteBlynkIntegration_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *applicationServiceClient) GenerateMqttIntegrationClientCertificate(ctx context.Context, in *GenerateMqttIntegrationClientCertificateRequest, opts ...grpc.CallOption) (*GenerateMqttIntegrationClientCertificateResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(GenerateMqttIntegrationClientCertificateResponse)
|
||||
@@ -727,14 +727,6 @@ type ApplicationServiceServer interface {
|
||||
UpdateMyDevicesIntegration(context.Context, *UpdateMyDevicesIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Delete myDevices integration.
|
||||
DeleteMyDevicesIntegration(context.Context, *DeleteMyDevicesIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Create LoRaCloud integration.
|
||||
CreateLoraCloudIntegration(context.Context, *CreateLoraCloudIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Get LoRaCloud integration.
|
||||
GetLoraCloudIntegration(context.Context, *GetLoraCloudIntegrationRequest) (*GetLoraCloudIntegrationResponse, error)
|
||||
// Update LoRaCloud integration.
|
||||
UpdateLoraCloudIntegration(context.Context, *UpdateLoraCloudIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Delete LoRaCloud integration.
|
||||
DeleteLoraCloudIntegration(context.Context, *DeleteLoraCloudIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Create GCP Pub/Sub integration.
|
||||
CreateGcpPubSubIntegration(context.Context, *CreateGcpPubSubIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Get GCP Pub/Sub integration.
|
||||
@@ -775,6 +767,14 @@ type ApplicationServiceServer interface {
|
||||
UpdateIftttIntegration(context.Context, *UpdateIftttIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Delete IFTTT integration.
|
||||
DeleteIftttIntegration(context.Context, *DeleteIftttIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Create Blynk integration.
|
||||
CreateBlynkIntegration(context.Context, *CreateBlynkIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Get Blynk integration.
|
||||
GetBlynkIntegration(context.Context, *GetBlynkIntegrationRequest) (*GetBlynkIntegrationResponse, error)
|
||||
// Update Blynk integration.
|
||||
UpdateBlynkIntegration(context.Context, *UpdateBlynkIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Delete Blynk integration.
|
||||
DeleteBlynkIntegration(context.Context, *DeleteBlynkIntegrationRequest) (*emptypb.Empty, error)
|
||||
// Generates application ID specific client-certificate.
|
||||
GenerateMqttIntegrationClientCertificate(context.Context, *GenerateMqttIntegrationClientCertificateRequest) (*GenerateMqttIntegrationClientCertificateResponse, error)
|
||||
// List device-profiles used within the given application.
|
||||
@@ -857,18 +857,6 @@ func (UnimplementedApplicationServiceServer) UpdateMyDevicesIntegration(context.
|
||||
func (UnimplementedApplicationServiceServer) DeleteMyDevicesIntegration(context.Context, *DeleteMyDevicesIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method DeleteMyDevicesIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) CreateLoraCloudIntegration(context.Context, *CreateLoraCloudIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method CreateLoraCloudIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) GetLoraCloudIntegration(context.Context, *GetLoraCloudIntegrationRequest) (*GetLoraCloudIntegrationResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method GetLoraCloudIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) UpdateLoraCloudIntegration(context.Context, *UpdateLoraCloudIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method UpdateLoraCloudIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) DeleteLoraCloudIntegration(context.Context, *DeleteLoraCloudIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method DeleteLoraCloudIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) CreateGcpPubSubIntegration(context.Context, *CreateGcpPubSubIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method CreateGcpPubSubIntegration not implemented")
|
||||
}
|
||||
@@ -929,6 +917,18 @@ func (UnimplementedApplicationServiceServer) UpdateIftttIntegration(context.Cont
|
||||
func (UnimplementedApplicationServiceServer) DeleteIftttIntegration(context.Context, *DeleteIftttIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method DeleteIftttIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) CreateBlynkIntegration(context.Context, *CreateBlynkIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method CreateBlynkIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) GetBlynkIntegration(context.Context, *GetBlynkIntegrationRequest) (*GetBlynkIntegrationResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method GetBlynkIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) UpdateBlynkIntegration(context.Context, *UpdateBlynkIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method UpdateBlynkIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) DeleteBlynkIntegration(context.Context, *DeleteBlynkIntegrationRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method DeleteBlynkIntegration not implemented")
|
||||
}
|
||||
func (UnimplementedApplicationServiceServer) GenerateMqttIntegrationClientCertificate(context.Context, *GenerateMqttIntegrationClientCertificateRequest) (*GenerateMqttIntegrationClientCertificateResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method GenerateMqttIntegrationClientCertificate not implemented")
|
||||
}
|
||||
@@ -1355,78 +1355,6 @@ func _ApplicationService_DeleteMyDevicesIntegration_Handler(srv interface{}, ctx
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_CreateLoraCloudIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(CreateLoraCloudIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).CreateLoraCloudIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_CreateLoraCloudIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).CreateLoraCloudIntegration(ctx, req.(*CreateLoraCloudIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_GetLoraCloudIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(GetLoraCloudIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).GetLoraCloudIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_GetLoraCloudIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).GetLoraCloudIntegration(ctx, req.(*GetLoraCloudIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_UpdateLoraCloudIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(UpdateLoraCloudIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).UpdateLoraCloudIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_UpdateLoraCloudIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).UpdateLoraCloudIntegration(ctx, req.(*UpdateLoraCloudIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_DeleteLoraCloudIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(DeleteLoraCloudIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).DeleteLoraCloudIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_DeleteLoraCloudIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).DeleteLoraCloudIntegration(ctx, req.(*DeleteLoraCloudIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_CreateGcpPubSubIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(CreateGcpPubSubIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
@@ -1787,6 +1715,78 @@ func _ApplicationService_DeleteIftttIntegration_Handler(srv interface{}, ctx con
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_CreateBlynkIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(CreateBlynkIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).CreateBlynkIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_CreateBlynkIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).CreateBlynkIntegration(ctx, req.(*CreateBlynkIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_GetBlynkIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(GetBlynkIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).GetBlynkIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_GetBlynkIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).GetBlynkIntegration(ctx, req.(*GetBlynkIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_UpdateBlynkIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(UpdateBlynkIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).UpdateBlynkIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_UpdateBlynkIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).UpdateBlynkIntegration(ctx, req.(*UpdateBlynkIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_DeleteBlynkIntegration_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(DeleteBlynkIntegrationRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(ApplicationServiceServer).DeleteBlynkIntegration(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: ApplicationService_DeleteBlynkIntegration_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(ApplicationServiceServer).DeleteBlynkIntegration(ctx, req.(*DeleteBlynkIntegrationRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _ApplicationService_GenerateMqttIntegrationClientCertificate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(GenerateMqttIntegrationClientCertificateRequest)
|
||||
if err := dec(in); err != nil {
|
||||
@@ -1936,22 +1936,6 @@ var ApplicationService_ServiceDesc = grpc.ServiceDesc{
|
||||
MethodName: "DeleteMyDevicesIntegration",
|
||||
Handler: _ApplicationService_DeleteMyDevicesIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "CreateLoraCloudIntegration",
|
||||
Handler: _ApplicationService_CreateLoraCloudIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GetLoraCloudIntegration",
|
||||
Handler: _ApplicationService_GetLoraCloudIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "UpdateLoraCloudIntegration",
|
||||
Handler: _ApplicationService_UpdateLoraCloudIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "DeleteLoraCloudIntegration",
|
||||
Handler: _ApplicationService_DeleteLoraCloudIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "CreateGcpPubSubIntegration",
|
||||
Handler: _ApplicationService_CreateGcpPubSubIntegration_Handler,
|
||||
@@ -2032,6 +2016,22 @@ var ApplicationService_ServiceDesc = grpc.ServiceDesc{
|
||||
MethodName: "DeleteIftttIntegration",
|
||||
Handler: _ApplicationService_DeleteIftttIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "CreateBlynkIntegration",
|
||||
Handler: _ApplicationService_CreateBlynkIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GetBlynkIntegration",
|
||||
Handler: _ApplicationService_GetBlynkIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "UpdateBlynkIntegration",
|
||||
Handler: _ApplicationService_UpdateBlynkIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "DeleteBlynkIntegration",
|
||||
Handler: _ApplicationService_DeleteBlynkIntegration_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GenerateMqttIntegrationClientCertificate",
|
||||
Handler: _ApplicationService_GenerateMqttIntegrationClientCertificate_Handler,
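Taken together, the application_grpc.pb.go changes remove the four LoRa Cloud integration RPCs and add four Blynk integration RPCs to the generated ApplicationService client and server. A minimal, hypothetical sketch of calling one of the new Blynk methods from Go; the server address, API token and the request field are assumptions, not part of this diff, and the module path is taken from the repository layout:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/chirpstack/chirpstack/api/go/v4/api"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/grpc/metadata"
)

func main() {
	// Assumed local ChirpStack instance; adjust address and TLS settings as needed.
	conn, err := grpc.NewClient("localhost:8080", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// ChirpStack authenticates API calls with a bearer token in the request metadata.
	ctx := metadata.AppendToOutgoingContext(context.Background(), "authorization", "Bearer <api-token>")

	client := api.NewApplicationServiceClient(conn)

	// GetBlynkIntegrationRequest is declared in this diff; its fields are not,
	// so the ApplicationId field name here is an assumption.
	resp, err := client.GetBlynkIntegration(ctx, &api.GetBlynkIntegrationRequest{
		ApplicationId: "<application-uuid>",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp)
}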
api/go/api/device.pb.go (vendored): 2 changed lines.
@@ -1918,6 +1918,8 @@ type DeviceQueueItem struct {
 	// Confirmed.
 	Confirmed bool `protobuf:"varint,3,opt,name=confirmed,proto3" json:"confirmed,omitempty"`
 	// FPort (must be > 0).
+	// On enqueue and if using a JavaScript codec, this value might be
+	// automatically set by the codec function.
 	FPort uint32 `protobuf:"varint,4,opt,name=f_port,json=fPort,proto3" json:"f_port,omitempty"`
 	// Data.
 	// Or use the json_object field when a codec has been configured.
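The two added comment lines document that, when a JavaScript codec is configured, the codec function may set f_port itself on enqueue. A hedged sketch of enqueuing a DeviceQueueItem through the generated DeviceService client; Enqueue and EnqueueDeviceQueueItemRequest come from the ChirpStack v4 API rather than from this hunk, so treat those names as assumptions:

package main

import (
	"context"
	"log"

	"github.com/chirpstack/chirpstack/api/go/v4/api"
	"google.golang.org/grpc"
)

// enqueueDownlink queues a downlink for a single device.
func enqueueDownlink(ctx context.Context, conn grpc.ClientConnInterface, devEUI string) error {
	client := api.NewDeviceServiceClient(conn)
	_, err := client.Enqueue(ctx, &api.EnqueueDeviceQueueItemRequest{
		QueueItem: &api.DeviceQueueItem{
			DevEui:    devEUI,
			Confirmed: false,
			// FPort may be overridden by a JavaScript codec on enqueue,
			// per the comment added in the hunk above.
			FPort: 10,
			Data:  []byte{0x01, 0x02},
		},
	})
	if err != nil {
		log.Printf("enqueue failed: %v", err)
	}
	return err
}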
api/go/api/fuota.pb.go (vendored, new file): 2028 lines. File diff suppressed because it is too large.
api/go/api/fuota_grpc.pb.go (vendored, new file): 614 lines.
@@ -0,0 +1,614 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.5.1
// - protoc v6.30.2
// source: api/fuota.proto

package api

import (
	context "context"
	grpc "google.golang.org/grpc"
	codes "google.golang.org/grpc/codes"
	status "google.golang.org/grpc/status"
	emptypb "google.golang.org/protobuf/types/known/emptypb"
)

// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.64.0 or later.
const _ = grpc.SupportPackageIsVersion9

const (
	FuotaService_CreateDeployment_FullMethodName = "/api.FuotaService/CreateDeployment"
	FuotaService_GetDeployment_FullMethodName = "/api.FuotaService/GetDeployment"
	FuotaService_UpdateDeployment_FullMethodName = "/api.FuotaService/UpdateDeployment"
	FuotaService_DeleteDeployment_FullMethodName = "/api.FuotaService/DeleteDeployment"
	FuotaService_StartDeployment_FullMethodName = "/api.FuotaService/StartDeployment"
	FuotaService_ListDeployments_FullMethodName = "/api.FuotaService/ListDeployments"
	FuotaService_AddDevices_FullMethodName = "/api.FuotaService/AddDevices"
	FuotaService_RemoveDevices_FullMethodName = "/api.FuotaService/RemoveDevices"
	FuotaService_ListDevices_FullMethodName = "/api.FuotaService/ListDevices"
	FuotaService_AddGateways_FullMethodName = "/api.FuotaService/AddGateways"
	FuotaService_ListGateways_FullMethodName = "/api.FuotaService/ListGateways"
	FuotaService_RemoveGateways_FullMethodName = "/api.FuotaService/RemoveGateways"
	FuotaService_ListJobs_FullMethodName = "/api.FuotaService/ListJobs"
)

// FuotaServiceClient is the client API for FuotaService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
//
// FuotaService is the service providing API methods for FUOTA deployments.
type FuotaServiceClient interface {
	// Create the given FUOTA deployment.
	CreateDeployment(ctx context.Context, in *CreateFuotaDeploymentRequest, opts ...grpc.CallOption) (*CreateFuotaDeploymentResponse, error)
	// Get the FUOTA deployment for the given ID.
	GetDeployment(ctx context.Context, in *GetFuotaDeploymentRequest, opts ...grpc.CallOption) (*GetFuotaDeploymentResponse, error)
	// Update the given FUOTA deployment.
	UpdateDeployment(ctx context.Context, in *UpdateFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Delete the FUOTA deployment for the given ID.
	DeleteDeployment(ctx context.Context, in *DeleteFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Start the FUOTA deployment.
	StartDeployment(ctx context.Context, in *StartFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// List the FUOTA deployments.
	ListDeployments(ctx context.Context, in *ListFuotaDeploymentsRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentsResponse, error)
	// Add the given DevEUIs to the FUOTA deployment.
	AddDevices(ctx context.Context, in *AddDevicesToFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// Remove the given DevEUIs from the FUOTA deployment.
	RemoveDevices(ctx context.Context, in *RemoveDevicesFromFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// List FUOTA Deployment devices.
	ListDevices(ctx context.Context, in *ListFuotaDeploymentDevicesRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentDevicesResponse, error)
	// Add the given Gateway IDs to the FUOTA deployment.
	// By default, ChirpStack will automatically select the minimum amount of
	// gateways needed to cover all devices within the multicast-group. Setting
	// the gateways manually overrides this behaviour.
	AddGateways(ctx context.Context, in *AddGatewaysToFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// List the gateways added to the FUOTA deployment.
	ListGateways(ctx context.Context, in *ListFuotaDeploymentGatewaysRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentGatewaysResponse, error)
	// Remove the given Gateway IDs from the FUOTA deployment.
	RemoveGateways(ctx context.Context, in *RemoveGatewaysFromFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error)
	// List jobs for the given FUOTA deployment.
	ListJobs(ctx context.Context, in *ListFuotaDeploymentJobsRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentJobsResponse, error)
}

type fuotaServiceClient struct {
|
||||
cc grpc.ClientConnInterface
|
||||
}
|
||||
|
||||
func NewFuotaServiceClient(cc grpc.ClientConnInterface) FuotaServiceClient {
|
||||
return &fuotaServiceClient{cc}
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) CreateDeployment(ctx context.Context, in *CreateFuotaDeploymentRequest, opts ...grpc.CallOption) (*CreateFuotaDeploymentResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(CreateFuotaDeploymentResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_CreateDeployment_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) GetDeployment(ctx context.Context, in *GetFuotaDeploymentRequest, opts ...grpc.CallOption) (*GetFuotaDeploymentResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(GetFuotaDeploymentResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_GetDeployment_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) UpdateDeployment(ctx context.Context, in *UpdateFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_UpdateDeployment_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) DeleteDeployment(ctx context.Context, in *DeleteFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_DeleteDeployment_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) StartDeployment(ctx context.Context, in *StartFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_StartDeployment_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) ListDeployments(ctx context.Context, in *ListFuotaDeploymentsRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentsResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(ListFuotaDeploymentsResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_ListDeployments_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) AddDevices(ctx context.Context, in *AddDevicesToFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_AddDevices_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) RemoveDevices(ctx context.Context, in *RemoveDevicesFromFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_RemoveDevices_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) ListDevices(ctx context.Context, in *ListFuotaDeploymentDevicesRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentDevicesResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(ListFuotaDeploymentDevicesResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_ListDevices_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) AddGateways(ctx context.Context, in *AddGatewaysToFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_AddGateways_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) ListGateways(ctx context.Context, in *ListFuotaDeploymentGatewaysRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentGatewaysResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(ListFuotaDeploymentGatewaysResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_ListGateways_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) RemoveGateways(ctx context.Context, in *RemoveGatewaysFromFuotaDeploymentRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(emptypb.Empty)
|
||||
err := c.cc.Invoke(ctx, FuotaService_RemoveGateways_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (c *fuotaServiceClient) ListJobs(ctx context.Context, in *ListFuotaDeploymentJobsRequest, opts ...grpc.CallOption) (*ListFuotaDeploymentJobsResponse, error) {
|
||||
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
|
||||
out := new(ListFuotaDeploymentJobsResponse)
|
||||
err := c.cc.Invoke(ctx, FuotaService_ListJobs_FullMethodName, in, out, cOpts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
// FuotaServiceServer is the server API for FuotaService service.
|
||||
// All implementations must embed UnimplementedFuotaServiceServer
|
||||
// for forward compatibility.
|
||||
//
|
||||
// FuotaService is the service providing API methods for FUOTA deployments.
|
||||
type FuotaServiceServer interface {
|
||||
// Create the given FUOTA deployment.
|
||||
CreateDeployment(context.Context, *CreateFuotaDeploymentRequest) (*CreateFuotaDeploymentResponse, error)
|
||||
// Get the FUOTA deployment for the given ID.
|
||||
GetDeployment(context.Context, *GetFuotaDeploymentRequest) (*GetFuotaDeploymentResponse, error)
|
||||
// Update the given FUOTA deployment.
|
||||
UpdateDeployment(context.Context, *UpdateFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// Delete the FUOTA deployment for the given ID.
|
||||
DeleteDeployment(context.Context, *DeleteFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// Start the FUOTA deployment.
|
||||
StartDeployment(context.Context, *StartFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// List the FUOTA deployments.
|
||||
ListDeployments(context.Context, *ListFuotaDeploymentsRequest) (*ListFuotaDeploymentsResponse, error)
|
||||
// Add the given DevEUIs to the FUOTA deployment.
|
||||
AddDevices(context.Context, *AddDevicesToFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// Remove the given DevEUIs from the FUOTA deployment.
|
||||
RemoveDevices(context.Context, *RemoveDevicesFromFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// List FUOTA Deployment devices.
|
||||
ListDevices(context.Context, *ListFuotaDeploymentDevicesRequest) (*ListFuotaDeploymentDevicesResponse, error)
|
||||
// Add the given Gateway IDs to the FUOTA deployment.
|
||||
// By default, ChirpStack will automatically select the minimum amount of
|
||||
// gateways needed to cover all devices within the multicast-group. Setting
|
||||
// the gateways manually overrides this behaviour.
|
||||
AddGateways(context.Context, *AddGatewaysToFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// List the gateways added to the FUOTA deployment.
|
||||
ListGateways(context.Context, *ListFuotaDeploymentGatewaysRequest) (*ListFuotaDeploymentGatewaysResponse, error)
|
||||
// Remove the given Gateway IDs from the FUOTA deployment.
|
||||
RemoveGateways(context.Context, *RemoveGatewaysFromFuotaDeploymentRequest) (*emptypb.Empty, error)
|
||||
// List jobs for the given FUOTA deployment.
|
||||
ListJobs(context.Context, *ListFuotaDeploymentJobsRequest) (*ListFuotaDeploymentJobsResponse, error)
|
||||
mustEmbedUnimplementedFuotaServiceServer()
|
||||
}
|
||||
|
||||
// UnimplementedFuotaServiceServer must be embedded to have
|
||||
// forward compatible implementations.
|
||||
//
|
||||
// NOTE: this should be embedded by value instead of pointer to avoid a nil
|
||||
// pointer dereference when methods are called.
|
||||
type UnimplementedFuotaServiceServer struct{}
|
||||
|
||||
func (UnimplementedFuotaServiceServer) CreateDeployment(context.Context, *CreateFuotaDeploymentRequest) (*CreateFuotaDeploymentResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method CreateDeployment not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) GetDeployment(context.Context, *GetFuotaDeploymentRequest) (*GetFuotaDeploymentResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method GetDeployment not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) UpdateDeployment(context.Context, *UpdateFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method UpdateDeployment not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) DeleteDeployment(context.Context, *DeleteFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method DeleteDeployment not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) StartDeployment(context.Context, *StartFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method StartDeployment not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) ListDeployments(context.Context, *ListFuotaDeploymentsRequest) (*ListFuotaDeploymentsResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method ListDeployments not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) AddDevices(context.Context, *AddDevicesToFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method AddDevices not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) RemoveDevices(context.Context, *RemoveDevicesFromFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method RemoveDevices not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) ListDevices(context.Context, *ListFuotaDeploymentDevicesRequest) (*ListFuotaDeploymentDevicesResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method ListDevices not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) AddGateways(context.Context, *AddGatewaysToFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method AddGateways not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) ListGateways(context.Context, *ListFuotaDeploymentGatewaysRequest) (*ListFuotaDeploymentGatewaysResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method ListGateways not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) RemoveGateways(context.Context, *RemoveGatewaysFromFuotaDeploymentRequest) (*emptypb.Empty, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method RemoveGateways not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) ListJobs(context.Context, *ListFuotaDeploymentJobsRequest) (*ListFuotaDeploymentJobsResponse, error) {
|
||||
return nil, status.Errorf(codes.Unimplemented, "method ListJobs not implemented")
|
||||
}
|
||||
func (UnimplementedFuotaServiceServer) mustEmbedUnimplementedFuotaServiceServer() {}
|
||||
func (UnimplementedFuotaServiceServer) testEmbeddedByValue() {}
|
||||
|
||||
// UnsafeFuotaServiceServer may be embedded to opt out of forward compatibility for this service.
|
||||
// Use of this interface is not recommended, as added methods to FuotaServiceServer will
|
||||
// result in compilation errors.
|
||||
type UnsafeFuotaServiceServer interface {
|
||||
mustEmbedUnimplementedFuotaServiceServer()
|
||||
}
|
||||
|
||||
func RegisterFuotaServiceServer(s grpc.ServiceRegistrar, srv FuotaServiceServer) {
|
||||
// If the following call pancis, it indicates UnimplementedFuotaServiceServer was
|
||||
// embedded by pointer and is nil. This will cause panics if an
|
||||
// unimplemented method is ever invoked, so we test this at initialization
|
||||
// time to prevent it from happening at runtime later due to I/O.
|
||||
if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
|
||||
t.testEmbeddedByValue()
|
||||
}
|
||||
s.RegisterService(&FuotaService_ServiceDesc, srv)
|
||||
}
|
||||
|
||||
func _FuotaService_CreateDeployment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(CreateFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).CreateDeployment(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_CreateDeployment_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).CreateDeployment(ctx, req.(*CreateFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_GetDeployment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(GetFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).GetDeployment(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_GetDeployment_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).GetDeployment(ctx, req.(*GetFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_UpdateDeployment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(UpdateFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).UpdateDeployment(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_UpdateDeployment_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).UpdateDeployment(ctx, req.(*UpdateFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_DeleteDeployment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(DeleteFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).DeleteDeployment(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_DeleteDeployment_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).DeleteDeployment(ctx, req.(*DeleteFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_StartDeployment_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(StartFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).StartDeployment(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_StartDeployment_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).StartDeployment(ctx, req.(*StartFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_ListDeployments_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(ListFuotaDeploymentsRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).ListDeployments(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_ListDeployments_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).ListDeployments(ctx, req.(*ListFuotaDeploymentsRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_AddDevices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(AddDevicesToFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).AddDevices(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_AddDevices_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).AddDevices(ctx, req.(*AddDevicesToFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_RemoveDevices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(RemoveDevicesFromFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).RemoveDevices(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_RemoveDevices_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).RemoveDevices(ctx, req.(*RemoveDevicesFromFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_ListDevices_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(ListFuotaDeploymentDevicesRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).ListDevices(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_ListDevices_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).ListDevices(ctx, req.(*ListFuotaDeploymentDevicesRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_AddGateways_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(AddGatewaysToFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).AddGateways(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_AddGateways_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).AddGateways(ctx, req.(*AddGatewaysToFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_ListGateways_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(ListFuotaDeploymentGatewaysRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).ListGateways(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_ListGateways_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).ListGateways(ctx, req.(*ListFuotaDeploymentGatewaysRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_RemoveGateways_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(RemoveGatewaysFromFuotaDeploymentRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).RemoveGateways(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_RemoveGateways_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).RemoveGateways(ctx, req.(*RemoveGatewaysFromFuotaDeploymentRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
func _FuotaService_ListJobs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
|
||||
in := new(ListFuotaDeploymentJobsRequest)
|
||||
if err := dec(in); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if interceptor == nil {
|
||||
return srv.(FuotaServiceServer).ListJobs(ctx, in)
|
||||
}
|
||||
info := &grpc.UnaryServerInfo{
|
||||
Server: srv,
|
||||
FullMethod: FuotaService_ListJobs_FullMethodName,
|
||||
}
|
||||
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
|
||||
return srv.(FuotaServiceServer).ListJobs(ctx, req.(*ListFuotaDeploymentJobsRequest))
|
||||
}
|
||||
return interceptor(ctx, in, info, handler)
|
||||
}
|
||||
|
||||
// FuotaService_ServiceDesc is the grpc.ServiceDesc for FuotaService service.
|
||||
// It's only intended for direct use with grpc.RegisterService,
|
||||
// and not to be introspected or modified (even as a copy)
|
||||
var FuotaService_ServiceDesc = grpc.ServiceDesc{
|
||||
ServiceName: "api.FuotaService",
|
||||
HandlerType: (*FuotaServiceServer)(nil),
|
||||
Methods: []grpc.MethodDesc{
|
||||
{
|
||||
MethodName: "CreateDeployment",
|
||||
Handler: _FuotaService_CreateDeployment_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "GetDeployment",
|
||||
Handler: _FuotaService_GetDeployment_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "UpdateDeployment",
|
||||
Handler: _FuotaService_UpdateDeployment_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "DeleteDeployment",
|
||||
Handler: _FuotaService_DeleteDeployment_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "StartDeployment",
|
||||
Handler: _FuotaService_StartDeployment_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "ListDeployments",
|
||||
Handler: _FuotaService_ListDeployments_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "AddDevices",
|
||||
Handler: _FuotaService_AddDevices_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "RemoveDevices",
|
||||
Handler: _FuotaService_RemoveDevices_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "ListDevices",
|
||||
Handler: _FuotaService_ListDevices_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "AddGateways",
|
||||
Handler: _FuotaService_AddGateways_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "ListGateways",
|
||||
Handler: _FuotaService_ListGateways_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "RemoveGateways",
|
||||
Handler: _FuotaService_RemoveGateways_Handler,
|
||||
},
|
||||
{
|
||||
MethodName: "ListJobs",
|
||||
Handler: _FuotaService_ListJobs_Handler,
|
||||
},
|
||||
},
|
||||
Streams: []grpc.StreamDesc{},
|
||||
Metadata: "api/fuota.proto",
|
||||
}
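
The ServiceDesc above is what grpc.RegisterService consumes to bind a FuotaServiceServer implementation to a server. A minimal sketch of wiring it up follows; the module path github.com/chirpstack/chirpstack/api/go/v4/api, the listen address, and the fuotaServer stub are assumptions and not part of this diff.

package main

import (
	"log"
	"net"

	"github.com/chirpstack/chirpstack/api/go/v4/api"
	"google.golang.org/grpc"
)

// fuotaServer is a hypothetical stub; embedding the generated
// UnimplementedFuotaServiceServer keeps it forward compatible.
type fuotaServer struct {
	api.UnimplementedFuotaServiceServer
}

func main() {
	lis, err := net.Listen("tcp", ":8080")
	if err != nil {
		log.Fatal(err)
	}
	s := grpc.NewServer()
	// Direct use of the ServiceDesc; the generated RegisterFuotaServiceServer
	// helper performs the same registration.
	s.RegisterService(&api.FuotaService_ServiceDesc, &fuotaServer{})
	log.Fatal(s.Serve(lis))
}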
|
29
api/go/integration/integration.pb.go
vendored

@@ -1246,7 +1246,10 @@ type DownlinkCommand struct {
|
||||
Data []byte `protobuf:"bytes,5,opt,name=data,proto3" json:"data,omitempty"`
|
||||
// Only use this when a codec has been configured that can encode this
|
||||
// object to bytes.
|
||||
Object *structpb.Struct `protobuf:"bytes,6,opt,name=object,proto3" json:"object,omitempty"`
|
||||
Object *structpb.Struct `protobuf:"bytes,6,opt,name=object,proto3" json:"object,omitempty"`
|
||||
// Expires at (optional).
|
||||
// Expired queue-items will be automatically removed from the queue.
|
||||
ExpiresAt *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=expires_at,json=expiresAt,proto3" json:"expires_at,omitempty"`
|
||||
unknownFields protoimpl.UnknownFields
|
||||
sizeCache protoimpl.SizeCache
|
||||
}
|
||||
@@ -1323,6 +1326,13 @@ func (x *DownlinkCommand) GetObject() *structpb.Struct {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *DownlinkCommand) GetExpiresAt() *timestamppb.Timestamp {
|
||||
if x != nil {
|
||||
return x.ExpiresAt
|
||||
}
|
||||
return nil
|
||||
}
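
With the new expires_at field, an integration that publishes a DownlinkCommand can attach an expiration to the resulting queue item; expired queue-items are removed automatically. A short sketch of populating the field from Go follows; the import path and the 15-minute window are assumptions.

package example

import (
	"time"

	"github.com/chirpstack/chirpstack/api/go/v4/integration"
	"google.golang.org/protobuf/types/known/timestamppb"
)

// newExpiringDownlink builds a DownlinkCommand that should be dropped from
// the device queue if it has not been sent within 15 minutes.
func newExpiringDownlink(devEui string) *integration.DownlinkCommand {
	return &integration.DownlinkCommand{
		DevEui:    devEui,
		Confirmed: false,
		FPort:     10,
		Data:      []byte{0x01, 0x02},
		// New field in this change: expired queue-items are removed automatically.
		ExpiresAt: timestamppb.New(time.Now().Add(15 * time.Minute)),
	}
}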
|
||||
|
||||
var File_integration_integration_proto protoreflect.FileDescriptor
|
||||
|
||||
const file_integration_integration_proto_rawDesc = "" +
|
||||
@@ -1438,14 +1448,16 @@ const file_integration_integration_proto_rawDesc = "" +
|
||||
"\x10integration_name\x18\x04 \x01(\tR\x0fintegrationName\x12\x1d\n" +
|
||||
"\n" +
|
||||
"event_type\x18\x05 \x01(\tR\teventType\x12/\n" +
|
||||
"\x06object\x18\x06 \x01(\v2\x17.google.protobuf.StructR\x06object\"\xb4\x01\n" +
|
||||
"\x06object\x18\x06 \x01(\v2\x17.google.protobuf.StructR\x06object\"\xef\x01\n" +
|
||||
"\x0fDownlinkCommand\x12\x0e\n" +
|
||||
"\x02id\x18\x01 \x01(\tR\x02id\x12\x17\n" +
|
||||
"\adev_eui\x18\x02 \x01(\tR\x06devEui\x12\x1c\n" +
|
||||
"\tconfirmed\x18\x03 \x01(\bR\tconfirmed\x12\x15\n" +
|
||||
"\x06f_port\x18\x04 \x01(\rR\x05fPort\x12\x12\n" +
|
||||
"\x04data\x18\x05 \x01(\fR\x04data\x12/\n" +
|
||||
"\x06object\x18\x06 \x01(\v2\x17.google.protobuf.StructR\x06object*,\n" +
|
||||
"\x06object\x18\x06 \x01(\v2\x17.google.protobuf.StructR\x06object\x129\n" +
|
||||
"\n" +
|
||||
"expires_at\x18\a \x01(\v2\x1a.google.protobuf.TimestampR\texpiresAt*,\n" +
|
||||
"\bLogLevel\x12\b\n" +
|
||||
"\x04INFO\x10\x00\x12\v\n" +
|
||||
"\aWARNING\x10\x01\x12\t\n" +
|
||||
@@ -1540,11 +1552,12 @@ var file_integration_integration_proto_depIdxs = []int32{
|
||||
2, // 29: integration.IntegrationEvent.device_info:type_name -> integration.DeviceInfo
|
||||
17, // 30: integration.IntegrationEvent.object:type_name -> google.protobuf.Struct
|
||||
17, // 31: integration.DownlinkCommand.object:type_name -> google.protobuf.Struct
|
||||
32, // [32:32] is the sub-list for method output_type
|
||||
32, // [32:32] is the sub-list for method input_type
|
||||
32, // [32:32] is the sub-list for extension type_name
|
||||
32, // [32:32] is the sub-list for extension extendee
|
||||
0, // [0:32] is the sub-list for field type_name
|
||||
16, // 32: integration.DownlinkCommand.expires_at:type_name -> google.protobuf.Timestamp
|
||||
33, // [33:33] is the sub-list for method output_type
|
||||
33, // [33:33] is the sub-list for method input_type
|
||||
33, // [33:33] is the sub-list for extension type_name
|
||||
33, // [33:33] is the sub-list for extension extendee
|
||||
0, // [0:33] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_integration_integration_proto_init() }
|
||||
|
2
api/grpc-web/package.json
vendored
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@chirpstack/chirpstack-api-grpc-web",
|
||||
"version": "4.13.0",
|
||||
"version": "4.14.0",
|
||||
"description": "Chirpstack gRPC-web API",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
|
2
api/java/build.gradle.kts
vendored
@@ -8,7 +8,7 @@ plugins {
|
||||
}
|
||||
|
||||
group = "io.chirpstack"
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
|
2
api/js/package.json
vendored
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@chirpstack/chirpstack-api",
|
||||
"version": "4.13.0",
|
||||
"version": "4.14.0",
|
||||
"description": "Chirpstack JS and TS API",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
|
2
api/kotlin/build.gradle.kts
vendored
@@ -9,7 +9,7 @@ plugins {
|
||||
}
|
||||
|
||||
group = "io.chirpstack"
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
|
||||
repositories {
|
||||
mavenCentral()
|
||||
|
3
api/md/Makefile
vendored
@@ -16,4 +16,5 @@ api:
|
||||
api/multicast_group.proto \
|
||||
api/relay.proto \
|
||||
api/tenant.proto \
|
||||
api/user.proto
|
||||
api/user.proto \
|
||||
api/fuota.proto
|
||||
|
3
api/php/Makefile
vendored
@@ -25,6 +25,7 @@ api:
|
||||
protoc ${PROTOC_ARGS} api/gateway.proto
|
||||
protoc ${PROTOC_ARGS} api/multicast_group.proto
|
||||
protoc ${PROTOC_ARGS} api/relay.proto
|
||||
protoc ${PROTOC_ARGS} api/fuota.proto
|
||||
|
||||
integration:
|
||||
protoc ${PROTOC_ARGS} integration/integration.proto
|
||||
@@ -37,4 +38,4 @@ stream:
|
||||
|
||||
google:
|
||||
protoc ${PROTOC_ARGS} google/api/annotations.proto
|
||||
protoc ${PROTOC_ARGS} google/api/http.proto
|
||||
protoc ${PROTOC_ARGS} google/api/http.proto
|
||||
|
2
api/php/composer.json
vendored
@@ -3,7 +3,7 @@
|
||||
"description": "Chirpstack PHP API",
|
||||
"license": "MIT",
|
||||
"type": "library",
|
||||
"version": "4.13.0",
|
||||
"version": "4.14.0",
|
||||
"require": {
|
||||
"php": ">=7.0.0",
|
||||
"grpc/grpc": "^v1.57.0",
|
||||
|
209
api/proto/api/application.proto
vendored
@@ -204,42 +204,6 @@ service ApplicationService {
|
||||
};
|
||||
}
|
||||
|
||||
// Create LoRaCloud integration.
|
||||
rpc CreateLoraCloudIntegration(CreateLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
post : "/api/applications/{integration.application_id}/integrations/"
|
||||
"loracloud"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Get LoRaCloud integration.
|
||||
rpc GetLoraCloudIntegration(GetLoraCloudIntegrationRequest)
|
||||
returns (GetLoraCloudIntegrationResponse) {
|
||||
option (google.api.http) = {
|
||||
get : "/api/applications/{application_id}/integrations/loracloud"
|
||||
};
|
||||
}
|
||||
|
||||
// Update LoRaCloud integration.
|
||||
rpc UpdateLoraCloudIntegration(UpdateLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
put : "/api/applications/{integration.application_id}/integrations/"
|
||||
"loracloud"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Delete LoRaCloud integration.
|
||||
rpc DeleteLoraCloudIntegration(DeleteLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
delete : "/api/applications/{application_id}/integrations/loracloud"
|
||||
};
|
||||
}
|
||||
|
||||
// Create GCP Pub/Sub integration.
|
||||
rpc CreateGcpPubSubIntegration(CreateGcpPubSubIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
@@ -419,6 +383,40 @@ service ApplicationService {
|
||||
};
|
||||
}
|
||||
|
||||
// Create Blynk integration.
|
||||
rpc CreateBlynkIntegration(CreateBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
post : "/api/applications/{integration.application_id}/integrations/blynk"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Get Blynk integration.
|
||||
rpc GetBlynkIntegration(GetBlynkIntegrationRequest)
|
||||
returns (GetBlynkIntegrationResponse) {
|
||||
option (google.api.http) = {
|
||||
get : "/api/applications/{application_id}/integrations/blynk"
|
||||
};
|
||||
}
|
||||
|
||||
// Update Blynk integration.
|
||||
rpc UpdateBlynkIntegration(UpdateBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
put : "/api/applications/{integration.application_id}/integrations/blynk"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Delete Blynk integration.
|
||||
rpc DeleteBlynkIntegration(DeleteBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
delete : "/api/applications/{application_id}/integrations/blynk"
|
||||
};
|
||||
}
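
The four RPCs above mirror the existing per-application integration endpoints. A sketch of calling CreateBlynkIntegration through the generated Go client follows; the server address, the bearer-token metadata handling, and the placeholder application ID are assumptions.

package main

import (
	"context"
	"log"

	"github.com/chirpstack/chirpstack/api/go/v4/api"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	"google.golang.org/grpc/metadata"
)

func main() {
	conn, err := grpc.NewClient("localhost:8080",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// ChirpStack reads the API token from the authorization metadata.
	ctx := metadata.AppendToOutgoingContext(context.Background(),
		"authorization", "Bearer <api-token>")

	client := api.NewApplicationServiceClient(conn)
	_, err = client.CreateBlynkIntegration(ctx, &api.CreateBlynkIntegrationRequest{
		Integration: &api.BlynkIntegration{
			ApplicationId: "00000000-0000-0000-0000-000000000000", // placeholder UUID
			Token:         "<blynk-token>",
		},
	})
	if err != nil {
		log.Fatal(err)
	}
}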
|
||||
|
||||
// Generates application ID specific client-certificate.
|
||||
rpc GenerateMqttIntegrationClientCertificate(
|
||||
GenerateMqttIntegrationClientCertificateRequest)
|
||||
@@ -453,13 +451,13 @@ enum IntegrationKind {
|
||||
INFLUX_DB = 1;
|
||||
THINGS_BOARD = 2;
|
||||
MY_DEVICES = 3;
|
||||
LORA_CLOUD = 4;
|
||||
GCP_PUB_SUB = 5;
|
||||
AWS_SNS = 6;
|
||||
AZURE_SERVICE_BUS = 7;
|
||||
PILOT_THINGS = 8;
|
||||
MQTT_GLOBAL = 9;
|
||||
IFTTT = 10;
|
||||
BLYNK = 11;
|
||||
}
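
BLYNK = 11 is appended to the IntegrationKind enum. A small sketch of checking for the new kind in a ListIntegrations response via the generated Go constant follows; the import path and client setup (as in the previous sketch) are assumptions.

package example

import (
	"context"
	"log"

	"github.com/chirpstack/chirpstack/api/go/v4/api"
)

// listKinds reports whether a Blynk integration is configured for the
// application; api.IntegrationKind_BLYNK is the generated constant for the
// new enum value.
func listKinds(ctx context.Context, client api.ApplicationServiceClient, appID string) error {
	resp, err := client.ListIntegrations(ctx, &api.ListIntegrationsRequest{
		ApplicationId: appID,
	})
	if err != nil {
		return err
	}
	for _, item := range resp.GetResult() {
		if item.GetKind() == api.IntegrationKind_BLYNK {
			log.Println("Blynk integration is configured")
		}
	}
	return nil
}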
|
||||
|
||||
message Application {
|
||||
@@ -764,110 +762,6 @@ message DeleteMyDevicesIntegrationRequest {
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message LoraCloudIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
||||
// Modem & Geolocation Services configuration.
|
||||
LoraCloudModemGeolocationServices modem_geolocation_services = 2;
|
||||
}
|
||||
|
||||
message LoraCloudModemGeolocationServices {
|
||||
// API token.
|
||||
string token = 1;
|
||||
|
||||
// Device implements Modem / Modem-E stack.
|
||||
bool modem_enabled = 2;
|
||||
|
||||
// Forward FPorts.
|
||||
// Forward uplink messages matching the given FPorts to the MGS.
|
||||
repeated uint32 forward_f_ports = 16;
|
||||
|
||||
// Use rx time for GNSS resolving.
|
||||
// In case this is set to true, the MGS resolver will use the RX time of the
|
||||
// network instead of the timestamp included in the LR1110 payload.
|
||||
bool gnss_use_rx_time = 5;
|
||||
|
||||
// Use gateway location for GNSS resolving.
|
||||
// In the case this is set to true, ChirpStack will provide the location of
|
||||
// one of the gateways to the MGS resolver to aid the resolving process.
|
||||
// Disable this in case the gateway location is not accurate / incorrectly
|
||||
// configured as an incorrect location will cause the resolver to return an
|
||||
// error.
|
||||
bool gnss_use_gateway_location = 17;
|
||||
|
||||
// Parse TLV records.
|
||||
// If enabled, stream records (expected in TLV format) are scanned for GNSS
|
||||
// data (0x06 or 0x07). If found, ChirpStack will make an additional
|
||||
// geolocation call to the MGS API for resolving the location of the detected
|
||||
// payload.
|
||||
bool parse_tlv = 6;
|
||||
|
||||
// Geolocation buffer TTL (in seconds).
|
||||
// If > 0, uplink RX meta-data will be stored in a buffer so that
|
||||
// the meta-data of multiple uplinks can be used for geolocation.
|
||||
uint32 geolocation_buffer_ttl = 7;
|
||||
|
||||
// Geolocation minimum buffer size.
|
||||
// If > 0, geolocation will only be performed when the buffer has
|
||||
// at least the given size.
|
||||
uint32 geolocation_min_buffer_size = 8;
|
||||
|
||||
// TDOA based geolocation is enabled.
|
||||
bool geolocation_tdoa = 9;
|
||||
|
||||
// RSSI based geolocation is enabled.
|
||||
bool geolocation_rssi = 10;
|
||||
|
||||
// GNSS based geolocation is enabled (LR1110).
|
||||
bool geolocation_gnss = 11;
|
||||
|
||||
// GNSS payload field.
|
||||
// This holds the name of the field in the decoded payload object which
|
||||
// contains the GNSS payload bytes (as HEX string).
|
||||
string geolocation_gnss_payload_field = 12;
|
||||
|
||||
// GNSS use RX time.
|
||||
// In case this is set to true, the resolver will use the RX time of the
|
||||
// network instead of the timestamp included in the LR1110 payload.
|
||||
bool geolocation_gnss_use_rx_time = 13;
|
||||
|
||||
// Wifi based geolocation is enabled.
|
||||
bool geolocation_wifi = 14;
|
||||
|
||||
// Wifi payload field.
|
||||
// This holds the name of the field in the decoded payload object which
|
||||
// contains an array of objects with the following fields:
|
||||
// * macAddress - e.g. 01:23:45:67:89:ab
|
||||
// * signalStrength - e.g. -51 (optional)
|
||||
string geolocation_wifi_payload_field = 15;
|
||||
}
|
||||
|
||||
message CreateLoraCloudIntegrationRequest {
|
||||
// Integration object to create.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message GetLoraCloudIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GetLoraCloudIntegrationResponse {
|
||||
// Integration object.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message UpdateLoraCloudIntegrationRequest {
|
||||
// Integration object to update.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message DeleteLoraCloudIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GcpPubSubIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
@@ -1095,6 +989,39 @@ message DeleteIftttIntegrationRequest {
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message BlynkIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
||||
// Blynk integration token.
|
||||
string token = 2;
|
||||
}
|
||||
|
||||
message CreateBlynkIntegrationRequest {
|
||||
// Integration object to create.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message GetBlynkIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GetBlynkIntegrationResponse {
|
||||
// Integration object.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message UpdateBlynkIntegrationRequest {
|
||||
// Integration object to update.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message DeleteBlynkIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GenerateMqttIntegrationClientCertificateRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
2
api/proto/api/device.proto
vendored
@@ -543,6 +543,8 @@ message DeviceQueueItem {
|
||||
bool confirmed = 3;
|
||||
|
||||
// FPort (must be > 0).
|
||||
// On enqueue and if using a JavaScript codec, this value might be
|
||||
// automatically set by the codec function.
|
||||
uint32 f_port = 4;
|
||||
|
||||
// Data.
|
||||
|
4
api/proto/integration/integration.proto
vendored
@@ -360,4 +360,8 @@ message DownlinkCommand {
|
||||
// Only use this when a codec has been configured that can encode this
|
||||
// object to bytes.
|
||||
google.protobuf.Struct object = 6;
|
||||
|
||||
// Expires at (optional).
|
||||
// Expired queue-items will be automatically removed from the queue.
|
||||
google.protobuf.Timestamp expires_at = 7;
|
||||
}
|
||||
|
1
api/python/Makefile
vendored
@@ -38,6 +38,7 @@ api:
|
||||
$(PROTOC) ${PROTOC_ARGS} chirpstack-api/api/gateway.proto
|
||||
$(PROTOC) ${PROTOC_ARGS} chirpstack-api/api/multicast_group.proto
|
||||
$(PROTOC) ${PROTOC_ARGS} chirpstack-api/api/relay.proto
|
||||
$(PROTOC) ${PROTOC_ARGS} chirpstack-api/api/fuota.proto
|
||||
|
||||
integration:
|
||||
$(PROTOC) ${PROTOC_ARGS} chirpstack-api/integration/integration.proto
|
||||
|
@@ -18,3 +18,5 @@ from .user_pb2 import *
|
||||
from .user_pb2_grpc import *
|
||||
from .relay_pb2 import *
|
||||
from .relay_pb2_grpc import *
|
||||
from .fuota_pb2 import *
|
||||
from .fuota_pb2_grpc import *
|
||||
|
2
api/python/src/setup.py
vendored
@@ -18,7 +18,7 @@ CLASSIFIERS = [
|
||||
|
||||
setup(
|
||||
name='chirpstack-api',
|
||||
version = "4.13.0",
|
||||
version = "4.14.0",
|
||||
url='https://github.com/brocaar/chirpstack-api',
|
||||
author='Orne Brocaar',
|
||||
author_email='info@brocaar.com',
|
||||
|
2
api/rust/Cargo.toml
vendored
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "chirpstack_api"
|
||||
description = "ChirpStack Protobuf / gRPC API definitions."
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
authors = ["Orne Brocaar <info@brocaar.com>"]
|
||||
license = "MIT"
|
||||
homepage = "https://www.chirpstack.io"
|
||||
|
209
api/rust/proto/chirpstack/api/application.proto
vendored
@@ -204,42 +204,6 @@ service ApplicationService {
|
||||
};
|
||||
}
|
||||
|
||||
// Create LoRaCloud integration.
|
||||
rpc CreateLoraCloudIntegration(CreateLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
post : "/api/applications/{integration.application_id}/integrations/"
|
||||
"loracloud"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Get LoRaCloud integration.
|
||||
rpc GetLoraCloudIntegration(GetLoraCloudIntegrationRequest)
|
||||
returns (GetLoraCloudIntegrationResponse) {
|
||||
option (google.api.http) = {
|
||||
get : "/api/applications/{application_id}/integrations/loracloud"
|
||||
};
|
||||
}
|
||||
|
||||
// Update LoRaCloud integration.
|
||||
rpc UpdateLoraCloudIntegration(UpdateLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
put : "/api/applications/{integration.application_id}/integrations/"
|
||||
"loracloud"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Delete LoRaCloud integration.
|
||||
rpc DeleteLoraCloudIntegration(DeleteLoraCloudIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
delete : "/api/applications/{application_id}/integrations/loracloud"
|
||||
};
|
||||
}
|
||||
|
||||
// Create GCP Pub/Sub integration.
|
||||
rpc CreateGcpPubSubIntegration(CreateGcpPubSubIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
@@ -419,6 +383,40 @@ service ApplicationService {
|
||||
};
|
||||
}
|
||||
|
||||
// Create Blynk integration.
|
||||
rpc CreateBlynkIntegration(CreateBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
post : "/api/applications/{integration.application_id}/integrations/blynk"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Get Blynk integration.
|
||||
rpc GetBlynkIntegration(GetBlynkIntegrationRequest)
|
||||
returns (GetBlynkIntegrationResponse) {
|
||||
option (google.api.http) = {
|
||||
get : "/api/applications/{application_id}/integrations/blynk"
|
||||
};
|
||||
}
|
||||
|
||||
// Update Blynk integration.
|
||||
rpc UpdateBlynkIntegration(UpdateBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
put : "/api/applications/{integration.application_id}/integrations/blynk"
|
||||
body : "*"
|
||||
};
|
||||
}
|
||||
|
||||
// Delete Blynk integration.
|
||||
rpc DeleteBlynkIntegration(DeleteBlynkIntegrationRequest)
|
||||
returns (google.protobuf.Empty) {
|
||||
option (google.api.http) = {
|
||||
delete : "/api/applications/{application_id}/integrations/blynk"
|
||||
};
|
||||
}
|
||||
|
||||
// Generates application ID specific client-certificate.
|
||||
rpc GenerateMqttIntegrationClientCertificate(
|
||||
GenerateMqttIntegrationClientCertificateRequest)
|
||||
@@ -453,13 +451,13 @@ enum IntegrationKind {
|
||||
INFLUX_DB = 1;
|
||||
THINGS_BOARD = 2;
|
||||
MY_DEVICES = 3;
|
||||
LORA_CLOUD = 4;
|
||||
GCP_PUB_SUB = 5;
|
||||
AWS_SNS = 6;
|
||||
AZURE_SERVICE_BUS = 7;
|
||||
PILOT_THINGS = 8;
|
||||
MQTT_GLOBAL = 9;
|
||||
IFTTT = 10;
|
||||
BLYNK = 11;
|
||||
}
|
||||
|
||||
message Application {
|
||||
@@ -764,110 +762,6 @@ message DeleteMyDevicesIntegrationRequest {
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message LoraCloudIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
||||
// Modem & Geolocation Services configuration.
|
||||
LoraCloudModemGeolocationServices modem_geolocation_services = 2;
|
||||
}
|
||||
|
||||
message LoraCloudModemGeolocationServices {
|
||||
// API token.
|
||||
string token = 1;
|
||||
|
||||
// Device implements Modem / Modem-E stack.
|
||||
bool modem_enabled = 2;
|
||||
|
||||
// Forward FPorts.
|
||||
// Forward uplink messages matching the given FPorts to the MGS.
|
||||
repeated uint32 forward_f_ports = 16;
|
||||
|
||||
// Use rx time for GNSS resolving.
|
||||
// In case this is set to true, the MGS resolver will use the RX time of the
|
||||
// network instead of the timestamp included in the LR1110 payload.
|
||||
bool gnss_use_rx_time = 5;
|
||||
|
||||
// Use gateway location for GNSS resolving.
|
||||
// In the case this is set to true, ChirpStack will provide the location of
|
||||
// one of the gateways to the MGS resolver to aid the resolving process.
|
||||
// Disable this in case the gateway location is not accurate / incorrectly
|
||||
// configured as an incorrect location will cause the resolver to return an
|
||||
// error.
|
||||
bool gnss_use_gateway_location = 17;
|
||||
|
||||
// Parse TLV records.
|
||||
// If enabled, stream records (expected in TLV format) are scanned for GNSS
|
||||
// data (0x06 or 0x07). If found, ChirpStack will make an additional
|
||||
// geolocation call to the MGS API for resolving the location of the detected
|
||||
// payload.
|
||||
bool parse_tlv = 6;
|
||||
|
||||
// Geolocation buffer TTL (in seconds).
|
||||
// If > 0, uplink RX meta-data will be stored in a buffer so that
|
||||
// the meta-data of multiple uplinks can be used for geolocation.
|
||||
uint32 geolocation_buffer_ttl = 7;
|
||||
|
||||
// Geolocation minimum buffer size.
|
||||
// If > 0, geolocation will only be performed when the buffer has
|
||||
// at least the given size.
|
||||
uint32 geolocation_min_buffer_size = 8;
|
||||
|
||||
// TDOA based geolocation is enabled.
|
||||
bool geolocation_tdoa = 9;
|
||||
|
||||
// RSSI based geolocation is enabled.
|
||||
bool geolocation_rssi = 10;
|
||||
|
||||
// GNSS based geolocation is enabled (LR1110).
|
||||
bool geolocation_gnss = 11;
|
||||
|
||||
// GNSS payload field.
|
||||
// This holds the name of the field in the decoded payload object which
|
||||
// contains the GNSS payload bytes (as HEX string).
|
||||
string geolocation_gnss_payload_field = 12;
|
||||
|
||||
// GNSS use RX time.
|
||||
// In case this is set to true, the resolver will use the RX time of the
|
||||
// network instead of the timestamp included in the LR1110 payload.
|
||||
bool geolocation_gnss_use_rx_time = 13;
|
||||
|
||||
// Wifi based geolocation is enabled.
|
||||
bool geolocation_wifi = 14;
|
||||
|
||||
// Wifi payload field.
|
||||
// This holds the name of the field in the decoded payload object which
|
||||
// contains an array of objects with the following fields:
|
||||
// * macAddress - e.g. 01:23:45:67:89:ab
|
||||
// * signalStrength - e.g. -51 (optional)
|
||||
string geolocation_wifi_payload_field = 15;
|
||||
}
|
||||
|
||||
message CreateLoraCloudIntegrationRequest {
|
||||
// Integration object to create.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message GetLoraCloudIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GetLoraCloudIntegrationResponse {
|
||||
// Integration object.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message UpdateLoraCloudIntegrationRequest {
|
||||
// Integration object to update.
|
||||
LoraCloudIntegration integration = 1;
|
||||
}
|
||||
|
||||
message DeleteLoraCloudIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GcpPubSubIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
@@ -1095,6 +989,39 @@ message DeleteIftttIntegrationRequest {
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message BlynkIntegration {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
||||
// Blynk integration token.
|
||||
string token = 2;
|
||||
}
|
||||
|
||||
message CreateBlynkIntegrationRequest {
|
||||
// Integration object to create.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message GetBlynkIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GetBlynkIntegrationResponse {
|
||||
// Integration object.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message UpdateBlynkIntegrationRequest {
|
||||
// Integration object to update.
|
||||
BlynkIntegration integration = 1;
|
||||
}
|
||||
|
||||
message DeleteBlynkIntegrationRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
}
|
||||
|
||||
message GenerateMqttIntegrationClientCertificateRequest {
|
||||
// Application ID (UUID).
|
||||
string application_id = 1;
|
||||
|
2
api/rust/proto/chirpstack/api/device.proto
vendored
@@ -543,6 +543,8 @@ message DeviceQueueItem {
|
||||
bool confirmed = 3;
|
||||
|
||||
// FPort (must be > 0).
|
||||
// On enqueue and if using a JavaScript codec, this value might be
|
||||
// automatically set by the codec function.
|
||||
uint32 f_port = 4;
|
||||
|
||||
// Data.
|
||||
|
@@ -360,4 +360,8 @@ message DownlinkCommand {
|
||||
// Only use this when a codec has been configured that can encode this
|
||||
// object to bytes.
|
||||
google.protobuf.Struct object = 6;
|
||||
|
||||
// Expires at (optional).
|
||||
// Expired queue-items will be automatically removed from the queue.
|
||||
google.protobuf.Timestamp expires_at = 7;
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "backend"
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
authors = ["Orne Brocaar <info@brocaar.com>"]
|
||||
edition = "2018"
|
||||
publish = false
|
||||
|
@@ -3,14 +3,14 @@
|
||||
description = "Library for building external ChirpStack integrations"
|
||||
homepage = "https://www.chirpstack.io/"
|
||||
license = "MIT"
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
authors = ["Orne Brocaar <info@brocaar.com>"]
|
||||
edition = "2021"
|
||||
repository = "https://github.com/chirpstack/chirpstack"
|
||||
|
||||
[dependencies]
|
||||
chirpstack_api = { path = "../api/rust", version = "4.13.0" }
|
||||
redis = { version = "0.31", features = [
|
||||
chirpstack_api = { path = "../api/rust", version = "4.14.0" }
|
||||
redis = { version = "0.32", features = [
|
||||
"cluster-async",
|
||||
"tokio-rustls-comp",
|
||||
] }
|
||||
@@ -25,4 +25,4 @@
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
tokio = { version = "1.44", features = ["macros", "rt-multi-thread"] }
|
||||
serde_json = "1.0"
|
||||
toml = "0.8"
|
||||
toml = "0.9"
|
||||
|
@@ -3,7 +3,7 @@
|
||||
description = "ChirpStack is an open-source LoRaWAN(TM) Network Server"
|
||||
repository = "https://github.com/chirpstack/chirpstack"
|
||||
homepage = "https://www.chirpstack.io/"
|
||||
version = "4.13.0"
|
||||
version = "4.14.0"
|
||||
authors = ["Orne Brocaar <info@brocaar.com>"]
|
||||
edition = "2021"
|
||||
publish = false
|
||||
@@ -19,7 +19,7 @@
|
||||
serde_json = "1.0"
|
||||
serde_urlencoded = "0.7"
|
||||
humantime-serde = "1.1"
|
||||
toml = "0.8"
|
||||
toml = "0.9"
|
||||
handlebars = "6.3"
|
||||
validator = { version = "0.20", features = ["derive"] }
|
||||
|
||||
@@ -27,15 +27,15 @@
|
||||
email_address = "0.2"
|
||||
diesel = { version = "2.2", features = ["chrono", "numeric"] }
|
||||
diesel_migrations = { version = "2.2" }
|
||||
diesel-async = { version = "0.5", features = [
|
||||
diesel-async = { version = "0.6", features = [
|
||||
"deadpool",
|
||||
"async-connection-wrapper",
|
||||
] }
|
||||
tokio-postgres = { version = "0.7", optional = true }
|
||||
tokio-postgres-rustls = { version = "0.13", optional = true }
|
||||
bigdecimal = "0.4"
|
||||
redis = { version = "0.31", features = ["tls-rustls", "tokio-rustls-comp"] }
|
||||
deadpool-redis = { version = "0.21", features = ["cluster", "serde"] }
|
||||
redis = { version = "0.32", features = ["tls-rustls", "tokio-rustls-comp"] }
|
||||
deadpool-redis = { version = "0.22", features = ["cluster", "serde"] }
|
||||
|
||||
# Logging
|
||||
tracing = "0.1"
|
||||
@@ -69,10 +69,13 @@
|
||||
urlencoding = "2.1"
|
||||
geohash = "0.13"
|
||||
gcp_auth = "0.12"
|
||||
lapin = { version = "2.5", default-features = false }
|
||||
lapin = { version = "3.1", default-features = false, features = [
|
||||
"rustls--ring",
|
||||
"rustls-native-certs",
|
||||
] }
|
||||
tokio-executor-trait = "2.1"
|
||||
tokio-reactor-trait = "1.1"
|
||||
rdkafka = { version = "0.37", default-features = false, features = [
|
||||
tokio-reactor-trait = "3.8"
|
||||
rdkafka = { version = "0.38", default-features = false, features = [
|
||||
"tokio",
|
||||
"cmake-build",
|
||||
] }
|
||||
@@ -81,7 +84,7 @@
|
||||
tonic = "0.13"
|
||||
tonic-web = "0.13"
|
||||
tonic-reflection = "0.13"
|
||||
tokio = { version = "1.44", features = ["macros", "rt-multi-thread"] }
|
||||
tokio = { version = "1.47", features = ["macros", "rt-multi-thread"] }
|
||||
tokio-stream = "0.1"
|
||||
prost-types = "0.13"
|
||||
prost = "0.13"
|
||||
@@ -97,6 +100,7 @@
|
||||
http-body = "1.0"
|
||||
rust-embed = "8.7"
|
||||
mime_guess = "2.0"
|
||||
url = "2.5"
|
||||
tower-http = { version = "0.6", features = ["trace", "auth"] }
|
||||
|
||||
# Error handling
|
||||
|
@@ -0,0 +1,4 @@
delete from
application_integration
where
kind = 'LoraCloud';
@@ -0,0 +1,4 @@
delete from
application_integration
where
kind = 'LoraCloud';
|
@@ -5,6 +5,8 @@ use async_trait::async_trait;
|
||||
|
||||
use super::{Handler, Request, Response};
|
||||
|
||||
use rquickjs::CatchResultExt;
|
||||
|
||||
pub struct Plugin {
|
||||
script: String,
|
||||
id: String,
|
||||
@@ -18,15 +20,34 @@ impl Plugin {
|
||||
let script = fs::read_to_string(file_path).context("Read ADR plugin")?;
|
||||
|
||||
let (id, name) = ctx.with::<_, Result<(String, String)>>(|ctx| {
|
||||
let m = rquickjs::Module::declare(ctx, "script", script.clone())
|
||||
.context("Declare script")?;
|
||||
let (m, m_promise) = m.eval().context("Evaluate script")?;
|
||||
() = m_promise.finish()?;
|
||||
let id_func: rquickjs::Function = m.get("id").context("Get id function")?;
|
||||
let name_func: rquickjs::Function = m.get("name").context("Get name function")?;
|
||||
let m = rquickjs::Module::declare(ctx.clone(), "script", script.clone())
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Declare script: JS error: {}", e))?;
|
||||
let (m, m_promise) = m
|
||||
.eval()
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Evaluate script: JS error: {}", e))?;
|
||||
() = m_promise
|
||||
.finish()
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Evaluate script: JS error: {}", e))?;
|
||||
let id_func: rquickjs::Function = m
|
||||
.get("id")
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get id function: JS error: {}", e))?;
|
||||
let name_func: rquickjs::Function = m
|
||||
.get("name")
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get name function: JS error: {}", e))?;
|
||||
|
||||
let id: String = id_func.call(()).context("Call id function")?;
|
||||
let name: String = name_func.call(()).context("Call name function")?;
|
||||
let id: String = id_func
|
||||
.call(())
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Call id function: JS error: {}", e))?;
|
||||
let name: String = name_func
|
||||
.call(())
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Call name function: JS error: {}", e))?;
|
||||
|
||||
Ok((id, name))
|
||||
})?;
|
||||
@@ -53,10 +74,17 @@ impl Handler for Plugin {
|
||||
|
||||
ctx.with::<_, Result<Response>>(|ctx| {
|
||||
let m = rquickjs::Module::declare(ctx.clone(), "script", self.script.clone())
|
||||
.context("Declare script")?;
|
||||
let (m, m_promise) = m.eval().context("Evaluate script")?;
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Declare script: JS error: {}", e))?;
|
||||
let (m, m_promise) = m
|
||||
.eval()
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Eval script: JS error: {}", e))?;
|
||||
() = m_promise.finish()?;
|
||||
let func: rquickjs::Function = m.get("handle").context("Get handle function")?;
|
||||
let func: rquickjs::Function = m
|
||||
.get("handle")
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get handle function: JS error: {}", e))?;
|
||||
|
||||
let device_variables = rquickjs::Object::new(ctx.clone())?;
|
||||
for (k, v) in &req.device_variables {
|
||||
@@ -95,14 +123,24 @@ impl Handler for Plugin {
|
||||
|
||||
input.set("uplinkHistory", uplink_history)?;
|
||||
|
||||
let res: rquickjs::Object = func.call((input,)).context("Call handle function")?;
|
||||
let res: rquickjs::Object = func
|
||||
.call((input,))
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Call handle function: JS error: {}", e))?;
|
||||
|
||||
Ok(Response {
|
||||
dr: res.get("dr").context("Get dr response")?,
|
||||
dr: res
|
||||
.get("dr")
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get dr response: JS error: {}", e))?,
|
||||
tx_power_index: res
|
||||
.get("txPowerIndex")
|
||||
.context("Get txPowerIndex response")?,
|
||||
nb_trans: res.get("nbTrans").context("Get nbTrans response")?,
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get txPowerIndex response: JS error: {}", e))?,
|
||||
nb_trans: res
|
||||
.get("nbTrans")
|
||||
.catch(&ctx)
|
||||
.map_err(|e| anyhow!("Get nbTrans response: JS error: {}", e))?,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@@ -233,7 +233,6 @@ impl ApplicationService for Application {
|
||||
application::IntegrationKind::InfluxDb => api::IntegrationKind::InfluxDb,
|
||||
application::IntegrationKind::ThingsBoard => api::IntegrationKind::ThingsBoard,
|
||||
application::IntegrationKind::MyDevices => api::IntegrationKind::MyDevices,
|
||||
application::IntegrationKind::LoraCloud => api::IntegrationKind::LoraCloud,
|
||||
application::IntegrationKind::GcpPubSub => api::IntegrationKind::GcpPubSub,
|
||||
application::IntegrationKind::AwsSns => api::IntegrationKind::AwsSns,
|
||||
application::IntegrationKind::AzureServiceBus => {
|
||||
@@ -241,6 +240,7 @@ impl ApplicationService for Application {
|
||||
}
|
||||
application::IntegrationKind::PilotThings => api::IntegrationKind::PilotThings,
|
||||
application::IntegrationKind::Ifttt => api::IntegrationKind::Ifttt,
|
||||
application::IntegrationKind::Blynk => api::IntegrationKind::Blynk,
|
||||
}
|
||||
.into(),
|
||||
})
|
||||
@@ -878,227 +878,6 @@ impl ApplicationService for Application {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn create_lora_cloud_integration(
|
||||
&self,
|
||||
request: Request<api::CreateLoraCloudIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req_int = match &request.get_ref().integration {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument("integration is missing"));
|
||||
}
|
||||
};
|
||||
let app_id = Uuid::from_str(&req_int.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let req_mgs = match &req_int.modem_geolocation_services {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument(
|
||||
"modem_geolocation_services configuration is missing",
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let _ = application::create_integration(application::Integration {
|
||||
application_id: app_id.into(),
|
||||
kind: application::IntegrationKind::LoraCloud,
|
||||
configuration: application::IntegrationConfiguration::LoraCloud(
|
||||
application::LoraCloudConfiguration {
|
||||
modem_geolocation_services: application::LoraCloudModemGeolocationServices {
|
||||
token: req_mgs.token.clone(),
|
||||
modem_enabled: req_mgs.modem_enabled,
|
||||
forward_f_ports: req_mgs.forward_f_ports.clone(),
|
||||
gnss_use_rx_time: req_mgs.gnss_use_rx_time,
|
||||
gnss_use_gateway_location: req_mgs.gnss_use_gateway_location,
|
||||
parse_tlv: req_mgs.parse_tlv,
|
||||
geolocation_buffer_ttl: req_mgs.geolocation_buffer_ttl,
|
||||
geolocation_min_buffer_size: req_mgs.geolocation_min_buffer_size,
|
||||
geolocation_tdoa: req_mgs.geolocation_tdoa,
|
||||
geolocation_rssi: req_mgs.geolocation_rssi,
|
||||
geolocation_gnss: req_mgs.geolocation_gnss,
|
||||
geolocation_gnss_payload_field: req_mgs
|
||||
.geolocation_gnss_payload_field
|
||||
.clone(),
|
||||
geolocation_gnss_use_rx_time: req_mgs.geolocation_gnss_use_rx_time,
|
||||
geolocation_wifi: req_mgs.geolocation_wifi,
|
||||
geolocation_wifi_payload_field: req_mgs
|
||||
.geolocation_wifi_payload_field
|
||||
.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut().insert(
|
||||
"x-log-application_id",
|
||||
req_int.application_id.parse().unwrap(),
|
||||
);
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_lora_cloud_integration(
|
||||
&self,
|
||||
request: Request<api::GetLoraCloudIntegrationRequest>,
|
||||
) -> Result<Response<api::GetLoraCloudIntegrationResponse>, Status> {
|
||||
let req = request.get_ref();
|
||||
let app_id = Uuid::from_str(&req.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Read, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let i = application::get_integration(&app_id, application::IntegrationKind::LoraCloud)
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
if let application::IntegrationConfiguration::LoraCloud(conf) = &i.configuration {
|
||||
let mgs = &conf.modem_geolocation_services;
|
||||
|
||||
let mut resp = Response::new(api::GetLoraCloudIntegrationResponse {
|
||||
integration: Some(api::LoraCloudIntegration {
|
||||
application_id: app_id.to_string(),
|
||||
modem_geolocation_services: Some(api::LoraCloudModemGeolocationServices {
|
||||
token: mgs.token.clone(),
|
||||
modem_enabled: mgs.modem_enabled,
|
||||
forward_f_ports: mgs.forward_f_ports.clone(),
|
||||
gnss_use_rx_time: mgs.gnss_use_rx_time,
|
||||
gnss_use_gateway_location: mgs.gnss_use_gateway_location,
|
||||
parse_tlv: mgs.parse_tlv,
|
||||
geolocation_buffer_ttl: mgs.geolocation_buffer_ttl,
|
||||
geolocation_min_buffer_size: mgs.geolocation_min_buffer_size,
|
||||
geolocation_tdoa: mgs.geolocation_tdoa,
|
||||
geolocation_rssi: mgs.geolocation_rssi,
|
||||
geolocation_gnss: mgs.geolocation_gnss,
|
||||
geolocation_gnss_payload_field: mgs.geolocation_gnss_payload_field.clone(),
|
||||
geolocation_gnss_use_rx_time: mgs.geolocation_gnss_use_rx_time,
|
||||
geolocation_wifi: mgs.geolocation_wifi,
|
||||
geolocation_wifi_payload_field: mgs.geolocation_wifi_payload_field.clone(),
|
||||
}),
|
||||
}),
|
||||
});
|
||||
resp.metadata_mut()
|
||||
.insert("x-log-application_id", req.application_id.parse().unwrap());
|
||||
|
||||
Ok(resp)
|
||||
} else {
|
||||
Err(Status::internal(
|
||||
"Integration has no LoraCloud configuration",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_lora_cloud_integration(
|
||||
&self,
|
||||
request: Request<api::UpdateLoraCloudIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req_int = match &request.get_ref().integration {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument("integration is missing"));
|
||||
}
|
||||
};
|
||||
let app_id = Uuid::from_str(&req_int.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let req_mgs = match &req_int.modem_geolocation_services {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument(
|
||||
"modem_geolocation_services configuration is missing",
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let _ = application::update_integration(application::Integration {
|
||||
application_id: app_id.into(),
|
||||
kind: application::IntegrationKind::LoraCloud,
|
||||
configuration: application::IntegrationConfiguration::LoraCloud(
|
||||
application::LoraCloudConfiguration {
|
||||
modem_geolocation_services: application::LoraCloudModemGeolocationServices {
|
||||
token: req_mgs.token.clone(),
|
||||
modem_enabled: req_mgs.modem_enabled,
|
||||
forward_f_ports: req_mgs.forward_f_ports.clone(),
|
||||
gnss_use_rx_time: req_mgs.gnss_use_rx_time,
|
||||
gnss_use_gateway_location: req_mgs.gnss_use_gateway_location,
|
||||
parse_tlv: req_mgs.parse_tlv,
|
||||
geolocation_buffer_ttl: req_mgs.geolocation_buffer_ttl,
|
||||
geolocation_min_buffer_size: req_mgs.geolocation_min_buffer_size,
|
||||
geolocation_tdoa: req_mgs.geolocation_tdoa,
|
||||
geolocation_rssi: req_mgs.geolocation_rssi,
|
||||
geolocation_gnss: req_mgs.geolocation_gnss,
|
||||
geolocation_gnss_payload_field: req_mgs
|
||||
.geolocation_gnss_payload_field
|
||||
.clone(),
|
||||
geolocation_gnss_use_rx_time: req_mgs.geolocation_gnss_use_rx_time,
|
||||
geolocation_wifi: req_mgs.geolocation_wifi,
|
||||
geolocation_wifi_payload_field: req_mgs
|
||||
.geolocation_wifi_payload_field
|
||||
.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut().insert(
|
||||
"x-log-application_id",
|
||||
req_int.application_id.parse().unwrap(),
|
||||
);
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn delete_lora_cloud_integration(
|
||||
&self,
|
||||
request: Request<api::DeleteLoraCloudIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req = request.get_ref();
|
||||
let app_id = Uuid::from_str(&req.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
application::delete_integration(&app_id, application::IntegrationKind::LoraCloud)
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut()
|
||||
.insert("x-log-application_id", req.application_id.parse().unwrap());
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn create_gcp_pub_sub_integration(
|
||||
&self,
|
||||
request: Request<api::CreateGcpPubSubIntegrationRequest>,
|
||||
@@ -1866,6 +1645,147 @@ impl ApplicationService for Application {
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn create_blynk_integration(
|
||||
&self,
|
||||
request: Request<api::CreateBlynkIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req_int = match &request.get_ref().integration {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument("integration is missing"));
|
||||
}
|
||||
};
|
||||
let app_id = Uuid::from_str(&req_int.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _ = application::create_integration(application::Integration {
|
||||
application_id: app_id.into(),
|
||||
kind: application::IntegrationKind::Blynk,
|
||||
configuration: application::IntegrationConfiguration::Blynk(
|
||||
application::BlynkConfiguration {
|
||||
token: req_int.token.clone(),
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut().insert(
|
||||
"x-log-application_id",
|
||||
req_int.application_id.parse().unwrap(),
|
||||
);
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn get_blynk_integration(
|
||||
&self,
|
||||
request: Request<api::GetBlynkIntegrationRequest>,
|
||||
) -> Result<Response<api::GetBlynkIntegrationResponse>, Status> {
|
||||
let req = request.get_ref();
|
||||
let app_id = Uuid::from_str(&req.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Read, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let i = application::get_integration(&app_id, application::IntegrationKind::Blynk)
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
if let application::IntegrationConfiguration::Blynk(conf) = &i.configuration {
|
||||
let mut resp = Response::new(api::GetBlynkIntegrationResponse {
|
||||
integration: Some(api::BlynkIntegration {
|
||||
application_id: app_id.to_string(),
|
||||
token: conf.token.clone(),
|
||||
}),
|
||||
});
|
||||
resp.metadata_mut()
|
||||
.insert("x-log-application_id", req.application_id.parse().unwrap());
|
||||
|
||||
Ok(resp)
|
||||
} else {
|
||||
Err(Status::internal("Integration has no Blynk configuration"))
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_blynk_integration(
|
||||
&self,
|
||||
request: Request<api::UpdateBlynkIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req_int = match &request.get_ref().integration {
|
||||
Some(v) => v,
|
||||
None => {
|
||||
return Err(Status::invalid_argument("integration is missing"));
|
||||
}
|
||||
};
|
||||
let app_id = Uuid::from_str(&req_int.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
let _ = application::update_integration(application::Integration {
|
||||
application_id: app_id.into(),
|
||||
kind: application::IntegrationKind::Blynk,
|
||||
configuration: application::IntegrationConfiguration::Blynk(
|
||||
application::BlynkConfiguration {
|
||||
token: req_int.token.clone(),
|
||||
},
|
||||
),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut().insert(
|
||||
"x-log-application_id",
|
||||
req_int.application_id.parse().unwrap(),
|
||||
);
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn delete_blynk_integration(
|
||||
&self,
|
||||
request: Request<api::DeleteBlynkIntegrationRequest>,
|
||||
) -> Result<Response<()>, Status> {
|
||||
let req = request.get_ref();
|
||||
let app_id = Uuid::from_str(&req.application_id).map_err(|e| e.status())?;
|
||||
|
||||
self.validator
|
||||
.validate(
|
||||
request.extensions(),
|
||||
validator::ValidateApplicationAccess::new(validator::Flag::Update, app_id),
|
||||
)
|
||||
.await?;
|
||||
|
||||
application::delete_integration(&app_id, application::IntegrationKind::Blynk)
|
||||
.await
|
||||
.map_err(|e| e.status())?;
|
||||
|
||||
let mut resp = Response::new(());
|
||||
resp.metadata_mut()
|
||||
.insert("x-log-application_id", req.application_id.parse().unwrap());
|
||||
|
||||
Ok(resp)
|
||||
}
|
||||
|
||||
async fn generate_mqtt_integration_client_certificate(
|
||||
&self,
|
||||
request: Request<api::GenerateMqttIntegrationClientCertificateRequest>,
|
||||
@@ -2690,197 +2610,6 @@ pub mod test {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_lora_cloud_integration() {
|
||||
let _guard = test::prepare().await;
|
||||
let app = get_application().await;
|
||||
let u = get_user().await;
|
||||
let service = Application::new(RequestValidator::new());
|
||||
|
||||
// create
|
||||
let create_req = get_request(
|
||||
&u.id,
|
||||
api::CreateLoraCloudIntegrationRequest {
|
||||
integration: Some(api::LoraCloudIntegration {
|
||||
application_id: app.id.to_string(),
|
||||
modem_geolocation_services: Some(api::LoraCloudModemGeolocationServices {
|
||||
token: "test-token".into(),
|
||||
modem_enabled: true,
|
||||
forward_f_ports: vec![199, 198, 197, 192],
|
||||
gnss_use_rx_time: true,
|
||||
gnss_use_gateway_location: true,
|
||||
parse_tlv: true,
|
||||
geolocation_buffer_ttl: 300,
|
||||
geolocation_min_buffer_size: 2,
|
||||
geolocation_tdoa: true,
|
||||
geolocation_rssi: true,
|
||||
geolocation_gnss: true,
|
||||
geolocation_gnss_payload_field: "gnss_pl".into(),
|
||||
geolocation_gnss_use_rx_time: true,
|
||||
geolocation_wifi: true,
|
||||
geolocation_wifi_payload_field: "wifi_pl".into(),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
);
|
||||
let _ = service
|
||||
.create_lora_cloud_integration(create_req)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// get
|
||||
let get_req = get_request(
|
||||
&u.id,
|
||||
api::GetLoraCloudIntegrationRequest {
|
||||
application_id: app.id.to_string(),
|
||||
},
|
||||
);
|
||||
let get_resp = service.get_lora_cloud_integration(get_req).await.unwrap();
|
||||
let get_resp = get_resp.get_ref();
|
||||
assert_eq!(
|
||||
Some(api::LoraCloudIntegration {
|
||||
application_id: app.id.to_string(),
|
||||
modem_geolocation_services: Some(api::LoraCloudModemGeolocationServices {
|
||||
token: "test-token".into(),
|
||||
modem_enabled: true,
|
||||
forward_f_ports: vec![199, 198, 197, 192],
|
||||
gnss_use_rx_time: true,
|
||||
gnss_use_gateway_location: true,
|
||||
parse_tlv: true,
|
||||
geolocation_buffer_ttl: 300,
|
||||
geolocation_min_buffer_size: 2,
|
||||
geolocation_tdoa: true,
|
||||
geolocation_rssi: true,
|
||||
geolocation_gnss: true,
|
||||
geolocation_gnss_payload_field: "gnss_pl".into(),
|
||||
geolocation_gnss_use_rx_time: true,
|
||||
geolocation_wifi: true,
|
||||
geolocation_wifi_payload_field: "wifi_pl".into(),
|
||||
}),
|
||||
}),
|
||||
get_resp.integration
|
||||
);
|
||||
|
||||
// update
|
||||
let update_req = get_request(
|
||||
&u.id,
|
||||
api::UpdateLoraCloudIntegrationRequest {
|
||||
integration: Some(api::LoraCloudIntegration {
|
||||
application_id: app.id.to_string(),
|
||||
modem_geolocation_services: Some(api::LoraCloudModemGeolocationServices {
|
||||
token: "test-token-updated".into(),
|
||||
modem_enabled: true,
|
||||
forward_f_ports: vec![199, 198, 197, 192],
|
||||
gnss_use_rx_time: true,
|
||||
gnss_use_gateway_location: true,
|
||||
parse_tlv: true,
|
||||
geolocation_buffer_ttl: 300,
|
||||
geolocation_min_buffer_size: 2,
|
||||
geolocation_tdoa: true,
|
||||
geolocation_rssi: true,
|
||||
geolocation_gnss: true,
|
||||
geolocation_gnss_payload_field: "gnss_pl".into(),
|
||||
geolocation_gnss_use_rx_time: true,
|
||||
geolocation_wifi: true,
|
||||
geolocation_wifi_payload_field: "wifi_pl".into(),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
);
|
||||
let _ = service
|
||||
.update_lora_cloud_integration(update_req)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// get
|
||||
let get_req = get_request(
|
||||
&u.id,
|
||||
api::GetLoraCloudIntegrationRequest {
|
||||
application_id: app.id.to_string(),
|
||||
},
|
||||
);
|
||||
let get_resp = service.get_lora_cloud_integration(get_req).await.unwrap();
|
||||
let get_resp = get_resp.get_ref();
|
||||
assert_eq!(
|
||||
Some(api::LoraCloudIntegration {
|
||||
application_id: app.id.to_string(),
|
||||
modem_geolocation_services: Some(api::LoraCloudModemGeolocationServices {
|
||||
token: "test-token-updated".into(),
|
||||
modem_enabled: true,
|
||||
forward_f_ports: vec![199, 198, 197, 192],
|
||||
gnss_use_rx_time: true,
|
||||
gnss_use_gateway_location: true,
|
||||
parse_tlv: true,
|
||||
geolocation_buffer_ttl: 300,
|
||||
geolocation_min_buffer_size: 2,
|
||||
geolocation_tdoa: true,
|
||||
geolocation_rssi: true,
|
||||
geolocation_gnss: true,
|
||||
geolocation_gnss_payload_field: "gnss_pl".into(),
|
||||
geolocation_gnss_use_rx_time: true,
|
||||
geolocation_wifi: true,
|
||||
geolocation_wifi_payload_field: "wifi_pl".into(),
|
||||
}),
|
||||
}),
|
||||
get_resp.integration
|
||||
);
|
||||
|
||||
// list
|
||||
let list_req = get_request(
|
||||
&u.id,
|
||||
api::ListIntegrationsRequest {
|
||||
application_id: app.id.to_string(),
|
||||
},
|
||||
);
|
||||
let list_resp = service.list_integrations(list_req).await.unwrap();
|
||||
let list_resp = list_resp.get_ref();
|
||||
assert_eq!(
|
||||
&api::ListIntegrationsResponse {
|
||||
total_count: 2,
|
||||
result: vec![
|
||||
api::IntegrationListItem {
|
||||
kind: api::IntegrationKind::LoraCloud.into(),
|
||||
},
|
||||
api::IntegrationListItem {
|
||||
kind: api::IntegrationKind::MqttGlobal.into(),
|
||||
}
|
||||
],
|
||||
},
|
||||
list_resp
|
||||
);
|
||||
|
||||
// delete
|
||||
let del_req = get_request(
|
||||
&u.id,
|
||||
api::DeleteLoraCloudIntegrationRequest {
|
||||
application_id: app.id.to_string(),
|
||||
},
|
||||
);
|
||||
let _ = service
|
||||
.delete_lora_cloud_integration(del_req)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// list
|
||||
let list_req = get_request(
|
||||
&u.id,
|
||||
api::ListIntegrationsRequest {
|
||||
application_id: app.id.to_string(),
|
||||
},
|
||||
);
|
||||
let list_resp = service.list_integrations(list_req).await.unwrap();
|
||||
let list_resp = list_resp.get_ref();
|
||||
assert_eq!(
|
||||
&api::ListIntegrationsResponse {
|
||||
total_count: 1,
|
||||
result: vec![api::IntegrationListItem {
|
||||
kind: api::IntegrationKind::MqttGlobal.into(),
|
||||
},],
|
||||
},
|
||||
list_resp
|
||||
);
|
||||
}
|
||||

    #[tokio::test]
    async fn test_gcp_pub_sub_integration() {
        let _guard = test::prepare().await;
@@ -3561,4 +3290,122 @@ pub mod test {
            list_resp
        );
    }

    #[tokio::test]
    async fn test_blynk_integration() {
        let _guard = test::prepare().await;
        let app = get_application().await;
        let u = get_user().await;
        let service = Application::new(RequestValidator::new());

        // create
        let create_req = get_request(
            &u.id,
            api::CreateBlynkIntegrationRequest {
                integration: Some(api::BlynkIntegration {
                    application_id: app.id.to_string(),
                    token: "foobartoken".into(),
                }),
            },
        );
        let _ = service.create_blynk_integration(create_req).await.unwrap();

        // get
        let get_req = get_request(
            &u.id,
            api::GetBlynkIntegrationRequest {
                application_id: app.id.to_string(),
            },
        );
        let get_resp = service.get_blynk_integration(get_req).await.unwrap();
        let get_resp = get_resp.get_ref();
        assert_eq!(
            Some(api::BlynkIntegration {
                application_id: app.id.to_string(),
                token: "foobartoken".into(),
            }),
            get_resp.integration
        );

        // update
        let update_req = get_request(
            &u.id,
            api::UpdateBlynkIntegrationRequest {
                integration: Some(api::BlynkIntegration {
                    application_id: app.id.to_string(),
                    token: "someothertoken".into(),
                }),
            },
        );
        let _ = service.update_blynk_integration(update_req).await.unwrap();

        // get
        let get_req = get_request(
            &u.id,
            api::GetBlynkIntegrationRequest {
                application_id: app.id.to_string(),
            },
        );
        let get_resp = service.get_blynk_integration(get_req).await.unwrap();
        let get_resp = get_resp.get_ref();
        assert_eq!(
            Some(api::BlynkIntegration {
                application_id: app.id.to_string(),
                token: "someothertoken".into(),
            }),
            get_resp.integration
        );

        // list
        let list_req = get_request(
            &u.id,
            api::ListIntegrationsRequest {
                application_id: app.id.to_string(),
            },
        );
        let list_resp = service.list_integrations(list_req).await.unwrap();
        let list_resp = list_resp.get_ref();
        assert_eq!(
            &api::ListIntegrationsResponse {
                total_count: 2,
                result: vec![
                    api::IntegrationListItem {
                        kind: api::IntegrationKind::Blynk.into(),
                    },
                    api::IntegrationListItem {
                        kind: api::IntegrationKind::MqttGlobal.into(),
                    }
                ],
            },
            list_resp
        );

        // delete
        let del_req = get_request(
            &u.id,
            api::DeleteBlynkIntegrationRequest {
                application_id: app.id.to_string(),
            },
        );
        let _ = service.delete_blynk_integration(del_req).await.unwrap();

        // list
        let list_req = get_request(
            &u.id,
            api::ListIntegrationsRequest {
                application_id: app.id.to_string(),
            },
        );
        let list_resp = service.list_integrations(list_req).await.unwrap();
        let list_resp = list_resp.get_ref();
        assert_eq!(
            &api::ListIntegrationsResponse {
                total_count: 1,
                result: vec![api::IntegrationListItem {
                    kind: api::IntegrationKind::MqttGlobal.into(),
                },],
            },
            list_resp
        );
    }
}

@@ -1075,6 +1075,7 @@ impl DeviceService for Device {
            .await?;

        let mut data = req_qi.data.clone();
        let mut f_port = req_qi.f_port as u8;

        if let Some(obj) = &req_qi.object {
            let dev = device::get(&dev_eui).await.map_err(|e| e.status())?;
@@ -1082,7 +1083,7 @@ impl DeviceService for Device {
                .await
                .map_err(|e| e.status())?;

            data = codec::struct_to_binary(
            (f_port, data) = codec::struct_to_binary(
                dp.payload_codec_runtime,
                req_qi.f_port as u8,
                &dev.variables,
@@ -1096,7 +1097,7 @@ impl DeviceService for Device {
        let qi = device_queue::DeviceQueueItem {
            id: Uuid::new_v4().into(),
            dev_eui,
            f_port: req_qi.f_port as i16,
            f_port: f_port as i16,
            confirmed: req_qi.confirmed,
            is_encrypted: req_qi.is_encrypted,
            f_cnt_down: if req_qi.is_encrypted {

@@ -391,10 +391,17 @@ impl Flow {
|
||||
frag_size: fragment_size as u8,
|
||||
padding: padding as u8,
|
||||
control: fragmentation::v1::FragSessionSetuReqPayloadControl {
|
||||
block_ack_delay: 0,
|
||||
fragmentation_matrix: 0,
|
||||
block_ack_delay: self.fuota_deployment.fragmentation_block_ack_delay
|
||||
as u8,
|
||||
fragmentation_matrix: self.fuota_deployment.fragmentation_matrix as u8,
|
||||
},
|
||||
descriptor: {
|
||||
let mut d = [0u8; 4];
|
||||
if self.fuota_deployment.fragmentation_descriptor.len() == 4 {
|
||||
d.copy_from_slice(&self.fuota_deployment.fragmentation_descriptor);
|
||||
}
|
||||
d
|
||||
},
|
||||
descriptor: [0, 0, 0, 0],
|
||||
},
|
||||
)
|
||||
.to_vec()?,
|
||||
@@ -444,11 +451,20 @@ impl Flow {
|
||||
frag_size: fragment_size as u8,
|
||||
padding: padding as u8,
|
||||
control: fragmentation::v2::FragSessionSetuReqPayloadControl {
|
||||
block_ack_delay: 0,
|
||||
block_ack_delay: self.fuota_deployment.fragmentation_block_ack_delay
|
||||
as u8,
|
||||
frag_algo: 0,
|
||||
ack_reception: false,
|
||||
},
|
||||
descriptor: [0, 0, 0, 0],
|
||||
descriptor: {
|
||||
let mut d = [0u8; 4];
|
||||
if self.fuota_deployment.fragmentation_descriptor.len() == 4 {
|
||||
d.copy_from_slice(
|
||||
&self.fuota_deployment.fragmentation_descriptor,
|
||||
);
|
||||
}
|
||||
d
|
||||
},
|
||||
mic,
|
||||
session_cnt,
|
||||
},
|
||||
|
@@ -4,6 +4,7 @@ use std::path::Path;
|
||||
use anyhow::Result;
|
||||
use serde::Deserialize;
|
||||
use tracing::{info, span, Instrument, Level};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::codec::Codec;
|
||||
use crate::storage::{self, device_profile_template};
|
||||
@@ -53,6 +54,8 @@ pub struct ProfileConfig {
|
||||
#[derive(Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct Profile {
|
||||
pub id: Uuid,
|
||||
pub vendor_profile_id: usize,
|
||||
pub region: region::CommonName,
|
||||
pub mac_version: region::MacVersion,
|
||||
pub reg_params_revision: region::Revision,
|
||||
@@ -69,6 +72,8 @@ pub struct Profile {
|
||||
impl Default for Profile {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
id: Uuid::nil(),
|
||||
vendor_profile_id: 0,
|
||||
region: region::CommonName::EU868,
|
||||
mac_version: region::MacVersion::LORAWAN_1_0_4,
|
||||
reg_params_revision: region::Revision::RP002_1_0_4,
|
||||
@@ -84,6 +89,7 @@ impl Default for Profile {
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct ProfileAbp {
|
||||
pub rx1_delay: usize,
|
||||
pub rx1_dr_offset: usize,
|
||||
@@ -92,6 +98,7 @@ pub struct ProfileAbp {
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct ProfileClassB {
|
||||
pub timeout_secs: usize,
|
||||
pub ping_slot_periodicity: usize,
|
||||
@@ -100,6 +107,7 @@ pub struct ProfileClassB {
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize)]
|
||||
#[serde(default)]
|
||||
pub struct ProfileClassC {
|
||||
pub timeout_secs: usize,
|
||||
}
|
||||
@@ -207,15 +215,8 @@ async fn handle_profile(
|
||||
|
||||
let profile_conf: ProfileConfig = toml::from_str(&fs::read_to_string(profile_path)?)?;
|
||||
|
||||
let id_regex = regex::Regex::new(r"[^a-zA-Z0-9\-]+").unwrap();
|
||||
let id = format!(
|
||||
"{}-{}-{}-{}",
|
||||
vendor.slug, device.slug, firmware.version, profile_conf.profile.region
|
||||
);
|
||||
let id = id_regex.replace_all(&id, "-").to_string();
|
||||
|
||||
let dpt = device_profile_template::DeviceProfileTemplate {
|
||||
id,
|
||||
id: profile_conf.profile.id.to_string(),
|
||||
name: device.name.clone(),
|
||||
description: device.description.clone(),
|
||||
vendor: vendor.name.clone(),
|
||||
|
@@ -13,6 +13,7 @@ const LPP_TEMPERATURE_SENSOR: u8 = 103;
|
||||
const LPP_HUMIDITY_SENSOR: u8 = 104;
|
||||
const LPP_ACCELEROMETER: u8 = 113;
|
||||
const LPP_BAROMETER: u8 = 115;
|
||||
const LPP_DISTANCE: u8 = 130;
|
||||
const LPP_GYROMETER: u8 = 134;
|
||||
const LPP_GPS_LOCATION: u8 = 136;
|
||||
|
||||
@@ -56,6 +57,7 @@ struct CayenneLpp {
|
||||
humidity_sensor: BTreeMap<u8, f64>,
|
||||
accelerometer: BTreeMap<u8, Accelerometer>,
|
||||
barometer: BTreeMap<u8, f64>,
|
||||
distance: BTreeMap<u8, f64>,
|
||||
gyrometer: BTreeMap<u8, Gyrometer>,
|
||||
gps_location: BTreeMap<u8, GpsLocation>,
|
||||
}
|
||||
@@ -82,6 +84,7 @@ impl CayenneLpp {
|
||||
LPP_HUMIDITY_SENSOR => lpp.set_humidity_sensor(buf[0], &mut cur)?,
|
||||
LPP_ACCELEROMETER => lpp.set_accelerometer(buf[0], &mut cur)?,
|
||||
LPP_BAROMETER => lpp.set_barometer(buf[0], &mut cur)?,
|
||||
LPP_DISTANCE => lpp.set_distance(buf[0], &mut cur)?,
|
||||
LPP_GYROMETER => lpp.set_gyrometer(buf[0], &mut cur)?,
|
||||
LPP_GPS_LOCATION => lpp.set_gps_location(buf[0], &mut cur)?,
|
||||
_ => {
|
||||
@@ -124,6 +127,7 @@ impl CayenneLpp {
|
||||
.set_accelerometer_from_value(v)
|
||||
.context("accelerometer")?,
|
||||
"barometer" => lpp.set_barometer_from_value(v).context("barometer")?,
|
||||
"distance" => lpp.set_distance_from_value(v).context("distance")?,
|
||||
"gyrometer" => lpp.set_gyrometer_from_value(v).context("gyrometer")?,
|
||||
"gpsLocation" => lpp.set_gps_location_from_value(v).context("gpsLocation")?,
|
||||
_ => {
|
||||
@@ -214,6 +218,14 @@ impl CayenneLpp {
|
||||
out.extend(val.to_be_bytes());
|
||||
}
|
||||
|
||||
// distance
|
||||
for (k, v) in &self.distance {
|
||||
out.extend([*k, LPP_DISTANCE]);
|
||||
|
||||
let val = (*v * 1000.0) as u32;
|
||||
out.extend(val.to_be_bytes());
|
||||
}
|
||||
|
||||
// gyrometer
|
||||
for (k, v) in &self.gyrometer {
|
||||
out.extend([*k, LPP_GYROMETER]);
|
||||
@@ -445,6 +457,24 @@ impl CayenneLpp {
|
||||
);
|
||||
}
|
||||
|
||||
if !self.distance.is_empty() {
|
||||
let mut val: pbjson_types::Struct = Default::default();
|
||||
for (k, v) in &self.distance {
|
||||
val.fields.insert(
|
||||
format!("{}", k),
|
||||
pbjson_types::Value {
|
||||
kind: Some(pbjson_types::value::Kind::NumberValue(*v)),
|
||||
},
|
||||
);
|
||||
}
|
||||
out.fields.insert(
|
||||
"distance".to_string(),
|
||||
pbjson_types::Value {
|
||||
kind: Some(pbjson_types::value::Kind::StructValue(val)),
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
if !self.gyrometer.is_empty() {
|
||||
let mut val: pbjson_types::Struct = Default::default();
|
||||
for (k, v) in &self.gyrometer {
|
||||
@@ -769,6 +799,27 @@ impl CayenneLpp {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_distance(&mut self, channel: u8, cur: &mut Cursor<&[u8]>) -> Result<()> {
|
||||
let mut buf: [u8; 4] = [0; 4];
|
||||
cur.read_exact(&mut buf)?;
|
||||
let val = u32::from_be_bytes(buf);
|
||||
self.distance.insert(channel, (val as f64) / 1000.0);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_distance_from_value(&mut self, v: &prost_types::Value) -> Result<()> {
|
||||
if let Some(prost_types::value::Kind::StructValue(s)) = &v.kind {
|
||||
for (k, v) in &s.fields {
|
||||
let c: u8 = k.parse()?;
|
||||
if let Some(prost_types::value::Kind::NumberValue(v)) = &v.kind {
|
||||
self.distance.insert(c, *v);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
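
Aside (not part of the diff): the distance type decodes a big-endian u32 of millimetres back to metres, mirroring the encoder's multiplication by 1000 above. A minimal standalone sketch of that arithmetic, matching the test vector "3, 130, 0, 0, 1, 16" used further down (channel 3, 0.272 m):

    // Standalone illustration, not ChirpStack code: Cayenne LPP distance framing.
    fn encode_distance(channel: u8, meters: f64) -> Vec<u8> {
        const LPP_DISTANCE: u8 = 130;
        let mm = (meters * 1000.0) as u32; // 0.272 m -> 272 mm -> 0x00000110
        let mut out = vec![channel, LPP_DISTANCE];
        out.extend(mm.to_be_bytes());
        out
    }

    fn main() {
        assert_eq!(encode_distance(3, 0.272), vec![3, 130, 0, 0, 1, 16]);
    }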
|
||||
|
||||
fn set_gyrometer(&mut self, channel: u8, cur: &mut Cursor<&[u8]>) -> Result<()> {
|
||||
let mut buf_x: [u8; 2] = [0; 2];
|
||||
let mut buf_y: [u8; 2] = [0; 2];
|
||||
@@ -913,6 +964,7 @@ pub mod test {
|
||||
3, 104, 41, 5, 104, 150, // humidity sensors
|
||||
3, 113, 0, 1, 0, 2, 0, 3, 5, 113, 3, 234, 7, 211, 11, 187, // accelerometers
|
||||
3, 115, 4, 31, 5, 115, 9, 196, // barometers
|
||||
3, 130, 0, 0, 1, 16, 5, 130, 1, 2, 3, 4, // distance
|
||||
3, 134, 0, 1, 0, 2, 0, 3, 5, 134, 3, 233, 7, 210, 11, 187, // gyrometers
|
||||
1, 136, 6, 118, 95, 242, 150, 10, 0, 3, 232, // gps location
|
||||
];
|
||||
@@ -1222,6 +1274,30 @@ pub mod test {
|
||||
})),
|
||||
},
|
||||
),
|
||||
(
|
||||
"distance".to_string(),
|
||||
prost_types::Value {
|
||||
kind: Some(prost_types::value::Kind::StructValue(prost_types::Struct {
|
||||
fields: [
|
||||
(
|
||||
"3".to_string(),
|
||||
prost_types::Value {
|
||||
kind: Some(prost_types::value::Kind::NumberValue(0.272)),
|
||||
},
|
||||
),
|
||||
(
|
||||
"5".to_string(),
|
||||
prost_types::Value {
|
||||
kind: Some(prost_types::value::Kind::NumberValue(16909.060)),
|
||||
},
|
||||
),
|
||||
]
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect(),
|
||||
})),
|
||||
},
|
||||
),
|
||||
(
|
||||
"gyrometer".to_string(),
|
||||
prost_types::Value {
|
||||
@@ -1704,6 +1780,34 @@ pub mod test {
|
||||
)),
|
||||
},
|
||||
),
|
||||
(
|
||||
"distance".to_string(),
|
||||
pbjson_types::Value {
|
||||
kind: Some(pbjson_types::value::Kind::StructValue(
|
||||
pbjson_types::Struct {
|
||||
fields: [
|
||||
(
|
||||
"3".to_string(),
|
||||
pbjson_types::Value {
|
||||
kind: Some(pbjson_types::value::Kind::NumberValue(0.272)),
|
||||
},
|
||||
),
|
||||
(
|
||||
"5".to_string(),
|
||||
pbjson_types::Value {
|
||||
kind: Some(pbjson_types::value::Kind::NumberValue(
|
||||
16909.060,
|
||||
)),
|
||||
},
|
||||
),
|
||||
]
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect(),
|
||||
},
|
||||
)),
|
||||
},
|
||||
),
|
||||
(
|
||||
"gyrometer".to_string(),
|
||||
pbjson_types::Value {
|
||||
|
@@ -110,7 +110,7 @@ pub async fn encode(
    variables: &HashMap<String, String>,
    encode_config: &str,
    s: &prost_types::Struct,
) -> Result<Vec<u8>> {
) -> Result<(u8, Vec<u8>)> {
    let conf = config::get();
    let max_run_ts = SystemTime::now() + conf.codec.js.max_execution_time;

@@ -158,7 +158,6 @@ pub async fn encode(
        let buff: rquickjs::Function = buff.get("Buffer")?;

        let input = rquickjs::Object::new(ctx.clone())?;
        input.set("fPort", f_port.into_js(&ctx)?)?;
        input.set("variables", variables.into_js(&ctx)?)?;
        input.set("data", convert::struct_to_rquickjs(&ctx, s))?;

@@ -186,10 +185,14 @@ pub async fn encode(

        // Directly into u8 can result into the following error:
        // Error converting from js 'float' into type 'i32'
        let v: Vec<f64> = res.get("bytes")?;
        let v: Vec<u8> = v.iter().map(|v| *v as u8).collect();
        let b: Vec<f64> = res.get("bytes")?;
        let b: Vec<u8> = b.iter().map(|v| *v as u8).collect();

        Ok(v)
        // Get fPort, or else fallback on provided fPort.
        let f_port: f64 = res.get("fPort").unwrap_or_else(|_| f_port as f64);
        let f_port = f_port as u8;

        Ok((f_port, b))
    })
}
|
||||
|
||||
@@ -388,8 +391,7 @@ pub mod test {
|
||||
"#
|
||||
.to_string();
|
||||
|
||||
let mut vars: HashMap<String, String> = HashMap::new();
|
||||
vars.insert("foo".into(), "bar".into());
|
||||
let vars: HashMap<String, String> = HashMap::new();
|
||||
|
||||
let mut input = prost_types::Struct::default();
|
||||
input.fields.insert(
|
||||
@@ -400,6 +402,26 @@ pub mod test {
|
||||
);
|
||||
|
||||
let out = encode(10, &vars, &encoder, &input).await.unwrap();
|
||||
assert_eq!(vec![1], out);
|
||||
assert_eq!((10, vec![1]), out);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
pub async fn test_encode_fport() {
|
||||
let encoder = r#"
|
||||
function encodeDownlink(input) {
|
||||
return {
|
||||
fPort: 20,
|
||||
bytes: [],
|
||||
};
|
||||
}
|
||||
"#
|
||||
.to_string();
|
||||
|
||||
let vars: HashMap<String, String> = HashMap::new();
|
||||
|
||||
let input = prost_types::Struct::default();
|
||||
|
||||
let out = encode(10, &vars, &encoder, &input).await.unwrap();
|
||||
assert_eq!((20, vec![]), out);
|
||||
}
|
||||
}
|
||||
|
@@ -97,10 +97,13 @@ pub async fn struct_to_binary(
    variables: &HashMap<String, String>,
    encoder_config: &str,
    obj: &prost_types::Struct,
) -> Result<Vec<u8>> {
) -> Result<(u8, Vec<u8>)> {
    Ok(match codec {
        Codec::NONE => Vec::new(),
        Codec::CAYENNE_LPP => cayenne_lpp::encode(obj).context("CayenneLpp encode")?,
        Codec::NONE => (f_port, Vec::new()),
        Codec::CAYENNE_LPP => (
            f_port,
            cayenne_lpp::encode(obj).context("CayenneLpp encode")?,
        ),
        Codec::JS => js::encode(f_port, variables, encoder_config, obj).await?,
    })
}
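
Aside (not part of the diff): with the new tuple return, only the JS codec can change the port; NONE and CAYENNE_LPP echo the port they were given. A standalone sketch of that contract under those assumptions:

    // Standalone sketch, not ChirpStack code: the (f_port, bytes) contract.
    fn encode(default_port: u8, script_port: Option<u8>, bytes: Vec<u8>) -> (u8, Vec<u8>) {
        // A static codec keeps the caller's port; a scriptable codec may return its own.
        (script_port.unwrap_or(default_port), bytes)
    }

    fn main() {
        assert_eq!(encode(10, None, vec![1]), (10, vec![1]));   // NONE / CAYENNE_LPP style
        assert_eq!(encode(10, Some(20), vec![]), (20, vec![])); // JS codec returning fPort
    }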
|
||||
|
@@ -67,7 +67,7 @@ impl<'a> Integration<'a> {
|
||||
// Use tokio executor and reactor.
|
||||
// At the moment the reactor is only available for unix.
|
||||
.with_executor(tokio_executor_trait::Tokio::current())
|
||||
.with_reactor(tokio_reactor_trait::Tokio);
|
||||
.with_reactor(tokio_reactor_trait::Tokio::current());
|
||||
|
||||
let conn = Connection::connect(&self.url, options).await?;
|
||||
let chan = conn.create_channel().await?;
|
||||
@@ -264,17 +264,17 @@ pub mod test {
|
||||
|
||||
let conf = Config {
|
||||
url: env::var("TEST_AMQP_URL").unwrap(),
|
||||
json: true,
|
||||
event_routing_key: "application.{{application_id}}.device.{{dev_eui}}.event.{{event}}"
|
||||
.to_string(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let i = Integration::new(&conf).await.unwrap();
|
||||
|
||||
let conn = loop {
|
||||
match Connection::connect(
|
||||
&conf.url,
|
||||
ConnectionProperties::default()
|
||||
.with_executor(tokio_executor_trait::Tokio::current())
|
||||
.with_reactor(tokio_reactor_trait::Tokio),
|
||||
.with_reactor(tokio_reactor_trait::Tokio::current()),
|
||||
)
|
||||
.await
|
||||
{
|
||||
@@ -318,8 +318,6 @@ pub mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let i = Integration::new(&conf).await.unwrap();
|
||||
|
||||
let pl = integration::UplinkEvent {
|
||||
device_info: Some(integration::DeviceInfo {
|
||||
application_id: Uuid::nil().to_string(),
|
||||
|
348
chirpstack/src/integration/blynk.rs
Normal file
@@ -0,0 +1,348 @@
|
||||
use std::collections::HashMap;
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use base64::prelude::*;
|
||||
use reqwest::header::{HeaderMap, AUTHORIZATION, CONTENT_TYPE};
|
||||
use reqwest::Client;
|
||||
use serde::Serialize;
|
||||
use tracing::{info, trace};
|
||||
use url::Url;
|
||||
|
||||
use super::Integration as IntegrationTrait;
|
||||
use crate::storage::application::BlynkConfiguration;
|
||||
use chirpstack_api::integration;
|
||||
|
||||
static CLIENT: OnceLock<Client> = OnceLock::new();
|
||||
|
||||
fn get_client() -> Client {
|
||||
CLIENT
|
||||
.get_or_init(|| {
|
||||
Client::builder()
|
||||
.timeout(Duration::from_secs(5))
|
||||
.use_rustls_tls()
|
||||
.build()
|
||||
.unwrap()
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
|
||||
struct DeviceInfo {
|
||||
pub tenant_id: String,
|
||||
pub tenant_name: String,
|
||||
pub application_id: String,
|
||||
pub application_name: String,
|
||||
pub device_profile_id: String,
|
||||
pub device_profile_name: String,
|
||||
pub device_name: String,
|
||||
pub dev_eui: String,
|
||||
pub device_class_enabled: String,
|
||||
pub tags: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl From<integration::DeviceInfo> for DeviceInfo {
|
||||
fn from(value: integration::DeviceInfo) -> Self {
|
||||
DeviceInfo {
|
||||
tenant_id: value.tenant_id.clone(),
|
||||
tenant_name: value.tenant_name.clone(),
|
||||
application_id: value.application_id.clone(),
|
||||
application_name: value.application_name.clone(),
|
||||
device_profile_id: value.device_profile_id.clone(),
|
||||
device_profile_name: value.device_profile_name.clone(),
|
||||
device_name: value.device_name.clone(),
|
||||
dev_eui: value.dev_eui.clone(),
|
||||
device_class_enabled: value.device_class_enabled().as_str_name().into(),
|
||||
tags: value.tags.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
|
||||
struct UplinkEvent {
|
||||
pub device_info: Option<DeviceInfo>,
|
||||
pub time: Option<pbjson_types::Timestamp>,
|
||||
pub object: Option<pbjson_types::Struct>,
|
||||
}
|
||||
|
||||
impl From<integration::UplinkEvent> for UplinkEvent {
|
||||
fn from(value: integration::UplinkEvent) -> Self {
|
||||
UplinkEvent {
|
||||
device_info: value.device_info.map(|v| v.into()),
|
||||
time: value.time.clone(),
|
||||
object: value.object.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
|
||||
struct JoinEvent {
|
||||
pub device_info: Option<DeviceInfo>,
|
||||
pub time: Option<pbjson_types::Timestamp>,
|
||||
}
|
||||
|
||||
impl From<integration::JoinEvent> for JoinEvent {
|
||||
fn from(value: integration::JoinEvent) -> Self {
|
||||
JoinEvent {
|
||||
device_info: value.device_info.map(|v| v.into()),
|
||||
time: value.time.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Integration {
|
||||
token: String,
|
||||
}
|
||||
|
||||
impl Integration {
|
||||
pub fn new(conf: &BlynkConfiguration) -> Integration {
|
||||
trace!("Initializing Blynk integration");
|
||||
|
||||
Integration {
|
||||
token: conf.token.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_token(&self) -> Result<(String, String)> {
|
||||
let token_b = BASE64_STANDARD.decode(&self.token).context("Parse token")?;
|
||||
let token_str = String::from_utf8(token_b).context("Parse token")?;
|
||||
let token_url = Url::parse(&token_str).context("Parse token")?;
|
||||
|
||||
let integration_url = format!(
|
||||
"{}://{}{}{}",
|
||||
token_url.scheme(),
|
||||
token_url.host_str().unwrap_or_default(),
|
||||
token_url
|
||||
.port()
|
||||
.map(|v| format!(":{}", v))
|
||||
.unwrap_or_default(),
|
||||
token_url.path()
|
||||
);
|
||||
let params: HashMap<String, String> = token_url.query_pairs().into_owned().collect();
|
||||
Ok((
|
||||
integration_url,
|
||||
params.get("token").cloned().unwrap_or_default(),
|
||||
))
|
||||
}
|
||||
|
||||
async fn post_event<T>(&self, event: &str, pl: &T) -> Result<()>
|
||||
where
|
||||
T: ?Sized + Serialize,
|
||||
{
|
||||
let (integration_url, integration_token) = self.parse_token()?;
|
||||
let b = serde_json::to_vec(pl)?;
|
||||
|
||||
info!(event = %event, url = %integration_url, "Posting event");
|
||||
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(CONTENT_TYPE, "application/json".parse().unwrap());
|
||||
headers.insert(
|
||||
AUTHORIZATION,
|
||||
format!("Bearer {}", integration_token).parse().unwrap(),
|
||||
);
|
||||
|
||||
get_client()
|
||||
.post(&integration_url)
|
||||
.body(b.to_vec())
|
||||
.query(&[("event", event)])
|
||||
.headers(headers)
|
||||
.send()
|
||||
.await?
|
||||
.error_for_status()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl IntegrationTrait for Integration {
|
||||
async fn uplink_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<()> {
|
||||
self.post_event("up", &UplinkEvent::from(pl.clone())).await
|
||||
}
|
||||
|
||||
async fn join_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
pl: &integration::JoinEvent,
|
||||
) -> Result<()> {
|
||||
self.post_event("join", &JoinEvent::from(pl.clone())).await
|
||||
}
|
||||
|
||||
async fn ack_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::AckEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn txack_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::TxAckEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn log_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::LogEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn status_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::StatusEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn location_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::LocationEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn integration_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::IntegrationEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use httpmock::prelude::*;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_blynk() {
|
||||
let server = MockServer::start();
|
||||
let mut token = String::new();
|
||||
BASE64_STANDARD.encode_string(
|
||||
format!("{}?token=my-secret-token", server.url("/")),
|
||||
&mut token,
|
||||
);
|
||||
|
||||
let i = Integration { token };
|
||||
|
||||
// uplink event
|
||||
let pl: integration::UplinkEvent = integration::UplinkEvent {
|
||||
device_info: Some(integration::DeviceInfo {
|
||||
application_id: "app_id".into(),
|
||||
application_name: "app_name".into(),
|
||||
dev_eui: "0102030405060708".into(),
|
||||
device_name: "dev_name".into(),
|
||||
device_profile_id: "dp_id".into(),
|
||||
device_profile_name: "dp_name".into(),
|
||||
tenant_id: "t_id".into(),
|
||||
tenant_name: "t_name".into(),
|
||||
device_class_enabled: 0,
|
||||
tags: HashMap::new(),
|
||||
}),
|
||||
object: Some(pbjson_types::Struct::default()),
|
||||
time: Some(pbjson_types::Timestamp::default()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let pl_expected = UplinkEvent {
|
||||
device_info: Some(DeviceInfo {
|
||||
application_id: "app_id".into(),
|
||||
application_name: "app_name".into(),
|
||||
dev_eui: "0102030405060708".into(),
|
||||
device_name: "dev_name".into(),
|
||||
device_profile_id: "dp_id".into(),
|
||||
device_profile_name: "dp_name".into(),
|
||||
tenant_id: "t_id".into(),
|
||||
tenant_name: "t_name".into(),
|
||||
device_class_enabled: "CLASS_A".into(),
|
||||
tags: HashMap::new(),
|
||||
}),
|
||||
object: Some(pbjson_types::Struct::default()),
|
||||
time: Some(pbjson_types::Timestamp::default()),
|
||||
};
|
||||
|
||||
let pl_test: UplinkEvent = pl.clone().into();
|
||||
assert_eq!(pl_expected, pl_test);
|
||||
|
||||
let mut mock = server.mock(|when, then| {
|
||||
when.method(POST)
|
||||
.path("/")
|
||||
.query_param("event", "up")
|
||||
.header("Authorization", "Bearer my-secret-token")
|
||||
.body(serde_json::to_string(&pl_expected).unwrap());
|
||||
|
||||
then.status(200);
|
||||
});
|
||||
|
||||
i.uplink_event(&HashMap::new(), &pl).await.unwrap();
|
||||
|
||||
mock.assert();
|
||||
mock.delete();
|
||||
|
||||
// join event
|
||||
let pl: integration::JoinEvent = integration::JoinEvent {
|
||||
device_info: Some(integration::DeviceInfo {
|
||||
application_id: "app_id".into(),
|
||||
application_name: "app_name".into(),
|
||||
dev_eui: "0102030405060708".into(),
|
||||
device_name: "dev_name".into(),
|
||||
device_profile_id: "dp_id".into(),
|
||||
device_profile_name: "dp_name".into(),
|
||||
tenant_id: "t_id".into(),
|
||||
tenant_name: "t_name".into(),
|
||||
device_class_enabled: 0,
|
||||
tags: HashMap::new(),
|
||||
}),
|
||||
time: Some(pbjson_types::Timestamp::default()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let pl_expected = JoinEvent {
|
||||
device_info: Some(DeviceInfo {
|
||||
application_id: "app_id".into(),
|
||||
application_name: "app_name".into(),
|
||||
dev_eui: "0102030405060708".into(),
|
||||
device_name: "dev_name".into(),
|
||||
device_profile_id: "dp_id".into(),
|
||||
device_profile_name: "dp_name".into(),
|
||||
tenant_id: "t_id".into(),
|
||||
tenant_name: "t_name".into(),
|
||||
device_class_enabled: "CLASS_A".into(),
|
||||
tags: HashMap::new(),
|
||||
}),
|
||||
time: Some(pbjson_types::Timestamp::default()),
|
||||
};
|
||||
|
||||
let pl_test: JoinEvent = pl.clone().into();
|
||||
assert_eq!(pl_expected, pl_test);
|
||||
|
||||
let mut mock = server.mock(|when, then| {
|
||||
when.method(POST)
|
||||
.path("/")
|
||||
.query_param("event", "join")
|
||||
.header("Authorization", "Bearer my-secret-token")
|
||||
.body(serde_json::to_string(&pl_expected).unwrap());
|
||||
|
||||
then.status(200);
|
||||
});
|
||||
|
||||
i.join_event(&HashMap::new(), &pl).await.unwrap();
|
||||
|
||||
mock.assert();
|
||||
mock.delete();
|
||||
}
|
||||
}
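
Aside (not part of the new file): as implied by parse_token and the test above, the Blynk token is expected to be the base64 encoding of the endpoint URL, with the secret carried in a `token` query parameter. A standalone sketch of producing such a value, using a hypothetical host:

    // Standalone sketch, not ChirpStack code; the host below is hypothetical.
    use base64::prelude::*;

    fn main() {
        let endpoint = "https://blynk.example.com/integration?token=my-secret-token";
        let token = BASE64_STANDARD.encode(endpoint);
        // parse_token() above would recover
        // ("https://blynk.example.com/integration", "my-secret-token") from this value.
        println!("{token}");
    }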
|
@@ -1,99 +0,0 @@
|
||||
use std::io::Cursor;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use prost::Message;
|
||||
use tracing::{info, trace};
|
||||
|
||||
use crate::storage::{get_async_redis_conn, redis_key};
|
||||
use chirpstack_api::{gw, internal};
|
||||
use lrwn::EUI64;
|
||||
|
||||
pub async fn get_geoloc_buffer(
|
||||
dev_eui: &EUI64,
|
||||
ttl: Duration,
|
||||
) -> Result<Vec<Vec<gw::UplinkRxInfo>>> {
|
||||
if ttl == Duration::zero() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
trace!(dev_eui = %dev_eui, "Getting geolocation buffer");
|
||||
let key = redis_key(format!("device:{{{}}}:loracloud:buffer", dev_eui));
|
||||
|
||||
let b: Vec<u8> = redis::cmd("GET")
|
||||
.arg(key)
|
||||
.query_async(&mut get_async_redis_conn().await?)
|
||||
.await
|
||||
.context("Get geolocation buffer")?;
|
||||
if b.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let buffer = internal::LoraCloudGeolocBuffer::decode(&mut Cursor::new(b))
|
||||
.context("Decode geolocation buffer")?;
|
||||
|
||||
let mut out: Vec<Vec<gw::UplinkRxInfo>> = Vec::new();
|
||||
|
||||
for uplink in &buffer.uplinks {
|
||||
let rx_info: Vec<gw::UplinkRxInfo> = uplink
|
||||
.rx_info
|
||||
.iter()
|
||||
.filter(|&rx_info| {
|
||||
let ts: DateTime<Utc> = match &rx_info.gw_time {
|
||||
None => {
|
||||
return false;
|
||||
}
|
||||
Some(v) => match (*v).try_into() {
|
||||
Ok(v) => v,
|
||||
Err(_) => {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// The interval between now and then must be smaller than the TTL
|
||||
(ts - Utc::now()) < ttl
|
||||
})
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
if rx_info.len() > 3 {
|
||||
out.push(rx_info);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
pub async fn save_geoloc_buffer(
|
||||
dev_eui: &EUI64,
|
||||
ttl: &Duration,
|
||||
items: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<()> {
|
||||
if *ttl == Duration::zero() || items.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
trace!(dev_eui = %dev_eui, "Saving geolocation buffer");
|
||||
let key = redis_key(format!("device:{{{}}}:loracloud:buffer", dev_eui));
|
||||
|
||||
let buffer = internal::LoraCloudGeolocBuffer {
|
||||
uplinks: items
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|rx_info| internal::LoraCloudGeolocBufferUplink { rx_info })
|
||||
.collect(),
|
||||
};
|
||||
let b = buffer.encode_to_vec();
|
||||
|
||||
() = redis::cmd("PSETEX")
|
||||
.arg(key)
|
||||
.arg(ttl.num_milliseconds())
|
||||
.arg(b)
|
||||
.query_async(&mut get_async_redis_conn().await?)
|
||||
.await?;
|
||||
|
||||
info!(dev_eui = %dev_eui, "Geolocation buffer saved");
|
||||
|
||||
Ok(())
|
||||
}
|
@@ -1,695 +0,0 @@
|
||||
use std::fmt;
|
||||
use std::sync::OnceLock;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use chirpstack_api::{common, gw};
|
||||
use reqwest::header::{HeaderMap, HeaderName, CONTENT_TYPE};
|
||||
use reqwest::Client;
|
||||
use serde::de::{self, Visitor};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::gpstime::ToGpsTime;
|
||||
use crate::uplink::helpers;
|
||||
use lrwn::EUI64;
|
||||
|
||||
static CLIENT: OnceLock<Client> = OnceLock::new();
|
||||
|
||||
fn get_client() -> Client {
|
||||
CLIENT
|
||||
.get_or_init(|| {
|
||||
Client::builder()
|
||||
.timeout(Duration::from_secs(5))
|
||||
.build()
|
||||
.unwrap()
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum Error {
|
||||
#[error("No location")]
|
||||
NoLocation,
|
||||
|
||||
#[error(transparent)]
|
||||
AnyhowError(#[from] anyhow::Error),
|
||||
}
|
||||
|
||||
pub struct ApiClient {
|
||||
uri: String,
|
||||
token: String,
|
||||
}
|
||||
|
||||
impl ApiClient {
|
||||
pub fn new(uri: &str, token: &str) -> ApiClient {
|
||||
ApiClient {
|
||||
uri: uri.to_string(),
|
||||
token: token.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn tdoa_single_frame(
|
||||
&self,
|
||||
rx_info: &[gw::UplinkRxInfo],
|
||||
) -> Result<common::Location> {
|
||||
let req = TdoaSingleFrameRequest::new(rx_info);
|
||||
let resp = self
|
||||
.request("/api/v1/solve/tdoa", &serde_json::to_string(&req)?)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverTdoa)?)
|
||||
}
|
||||
|
||||
pub async fn tdoa_multi_frame(
|
||||
&self,
|
||||
rx_info: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<common::Location> {
|
||||
let req = TdoaMultiFrameRequest::new(rx_info);
|
||||
let resp = self
|
||||
.request(
|
||||
"/api/v1/solve/tdoaMultiframe",
|
||||
&serde_json::to_string(&req)?,
|
||||
)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverTdoa)?)
|
||||
}
|
||||
|
||||
pub async fn rssi_single_frame(
|
||||
&self,
|
||||
rx_info: &[gw::UplinkRxInfo],
|
||||
) -> Result<common::Location> {
|
||||
let req = RssiSingleFrameRequest::new(rx_info);
|
||||
let resp = self
|
||||
.request("/api/v2/rssi", &serde_json::to_string(&req)?)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverRssi)?)
|
||||
}
|
||||
|
||||
pub async fn rssi_multi_frame(
|
||||
&self,
|
||||
rx_info: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<common::Location> {
|
||||
let req = RssiMultiFrameRequest::new(rx_info);
|
||||
let resp = self
|
||||
.request(
|
||||
"/api/v1/solve/rssiMultiframe",
|
||||
&serde_json::to_string(&req)?,
|
||||
)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverRssi)?)
|
||||
}
|
||||
|
||||
pub async fn wifi_tdoa_single_frame(
|
||||
&self,
|
||||
rx_info: &[gw::UplinkRxInfo],
|
||||
aps: &[WifiAccessPoint],
|
||||
) -> Result<common::Location> {
|
||||
let req = WifiTdoaSingleFrameRequest::new(rx_info, aps);
|
||||
let resp = self
|
||||
.request("/api/v1/solve/loraWifi", &serde_json::to_string(&req)?)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverWifi)?)
|
||||
}
|
||||
|
||||
pub async fn gnss_lr1110_single_frame(
|
||||
&self,
|
||||
rx_info: &[gw::UplinkRxInfo],
|
||||
use_rx_time: bool,
|
||||
pl: &[u8],
|
||||
) -> Result<common::Location> {
|
||||
let req = GnssLr1110SingleFrameRequest::new(rx_info, use_rx_time, pl);
|
||||
let resp = self
|
||||
.v3_request(
|
||||
"/api/v1/solve/gnss_lr1110_singleframe",
|
||||
&serde_json::to_string(&req)?,
|
||||
)
|
||||
.await?;
|
||||
Ok(resp.into_location(common::LocationSource::GeoResolverGnss)?)
|
||||
}
|
||||
|
||||
pub async fn uplink_send(&self, req: &UplinkRequest) -> Result<UplinkResponse> {
|
||||
let endpoint = format!("{}/api/v1/device/send", self.uri);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(CONTENT_TYPE, "application/json".parse().unwrap());
|
||||
headers.insert(
|
||||
HeaderName::try_from("Ocp-Apim-Subscription-Key").unwrap(),
|
||||
self.token.parse()?,
|
||||
);
|
||||
|
||||
let res = get_client()
|
||||
.post(endpoint)
|
||||
.headers(headers)
|
||||
.json(req)
|
||||
.send()
|
||||
.await?;
|
||||
let res = res.error_for_status()?;
|
||||
|
||||
Ok(res.json::<UplinkResponse>().await?)
|
||||
}
|
||||
|
||||
async fn request(&self, endpoint: &str, body: &str) -> Result<Response> {
|
||||
let endpoint = format!("{}{}", self.uri, endpoint);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(CONTENT_TYPE, "application/json".parse().unwrap());
|
||||
headers.insert(
|
||||
HeaderName::try_from("Ocp-Apim-Subscription-Key").unwrap(),
|
||||
self.token.parse()?,
|
||||
);
|
||||
|
||||
let res = get_client()
|
||||
.post(endpoint)
|
||||
.body(body.to_string())
|
||||
.headers(headers)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let res = res.error_for_status()?;
|
||||
|
||||
Ok(res.json::<Response>().await?)
|
||||
}
|
||||
|
||||
async fn v3_request(&self, endpoint: &str, body: &str) -> Result<V3Response> {
|
||||
let endpoint = format!("{}{}", self.uri, endpoint);
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(CONTENT_TYPE, "application/json".parse().unwrap());
|
||||
headers.insert(
|
||||
HeaderName::try_from("Ocp-Apim-Subscription-Key").unwrap(),
|
||||
self.token.parse()?,
|
||||
);
|
||||
|
||||
let res = get_client()
|
||||
.post(endpoint)
|
||||
.body(body.to_string())
|
||||
.headers(headers)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let res = res.error_for_status()?;
|
||||
Ok(res.json::<V3Response>().await?)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct TdoaSingleFrameRequest {
|
||||
pub lorawan: Vec<UplinkTdoa>,
|
||||
}
|
||||
|
||||
impl TdoaSingleFrameRequest {
|
||||
pub fn new(rx_info: &[gw::UplinkRxInfo]) -> Self {
|
||||
TdoaSingleFrameRequest {
|
||||
lorawan: rx_info.iter().map(UplinkTdoa::new).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct TdoaMultiFrameRequest {
|
||||
pub lorawan: Vec<Vec<UplinkTdoa>>,
|
||||
}
|
||||
|
||||
impl TdoaMultiFrameRequest {
|
||||
pub fn new(rx_info: &[Vec<gw::UplinkRxInfo>]) -> Self {
|
||||
TdoaMultiFrameRequest {
|
||||
lorawan: rx_info
|
||||
.iter()
|
||||
.map(|i| i.iter().map(UplinkTdoa::new).collect())
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct RssiSingleFrameRequest {
|
||||
pub lorawan: Vec<UplinkRssi>,
|
||||
}
|
||||
|
||||
impl RssiSingleFrameRequest {
|
||||
pub fn new(rx_info: &[gw::UplinkRxInfo]) -> Self {
|
||||
RssiSingleFrameRequest {
|
||||
lorawan: rx_info.iter().map(UplinkRssi::new).collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct RssiMultiFrameRequest {
|
||||
pub lorawan: Vec<Vec<UplinkRssi>>,
|
||||
}
|
||||
|
||||
impl RssiMultiFrameRequest {
|
||||
pub fn new(rx_info: &[Vec<gw::UplinkRxInfo>]) -> Self {
|
||||
RssiMultiFrameRequest {
|
||||
lorawan: rx_info
|
||||
.iter()
|
||||
.map(|i| i.iter().map(UplinkRssi::new).collect())
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct WifiTdoaSingleFrameRequest {
|
||||
pub lorawan: Vec<UplinkTdoa>,
|
||||
#[serde(rename = "wifiAccessPoints")]
|
||||
pub wifi_access_points: Vec<WifiAccessPoint>,
|
||||
}
|
||||
|
||||
impl WifiTdoaSingleFrameRequest {
|
||||
pub fn new(rx_info: &[gw::UplinkRxInfo], aps: &[WifiAccessPoint]) -> Self {
|
||||
WifiTdoaSingleFrameRequest {
|
||||
lorawan: rx_info.iter().map(UplinkTdoa::new).collect(),
|
||||
wifi_access_points: aps.to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct GnssLr1110SingleFrameRequest {
|
||||
pub payload: String,
|
||||
#[serde(rename = "gnss_capture_time", skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_capture_time: Option<f64>,
|
||||
#[serde(
|
||||
rename = "gnss_capture_time_accuracy",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub gnss_capture_time_accuracy: Option<f64>,
|
||||
#[serde(
|
||||
rename = "gnss_assist_position",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub gnss_assist_position: Option<Vec<f64>>,
|
||||
#[serde(
|
||||
rename = "gnss_assist_altitude",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub gnss_assist_altitude: Option<f64>,
|
||||
#[serde(rename = "gnss_use_2D_solver")]
|
||||
pub gnss_use_2d_solver: bool,
|
||||
}
|
||||
|
||||
impl GnssLr1110SingleFrameRequest {
|
||||
pub fn new(rx_info: &[gw::UplinkRxInfo], use_rx_time: bool, pl: &[u8]) -> Self {
|
||||
GnssLr1110SingleFrameRequest {
|
||||
payload: hex::encode(pl),
|
||||
gnss_capture_time: match use_rx_time {
|
||||
false => None,
|
||||
true => match helpers::get_time_since_gps_epoch(rx_info) {
|
||||
Some(v) => Some(v.as_secs_f64()),
|
||||
None => Some(
|
||||
chrono::Utc::now()
|
||||
.to_gps_time()
|
||||
.to_std()
|
||||
.unwrap_or_default()
|
||||
.as_secs_f64(),
|
||||
),
|
||||
},
|
||||
},
|
||||
gnss_capture_time_accuracy: None,
|
||||
gnss_assist_position: helpers::get_start_location(rx_info)
|
||||
.map(|loc| vec![loc.latitude, loc.longitude]),
|
||||
gnss_assist_altitude: helpers::get_start_location(rx_info).map(|loc| loc.altitude),
|
||||
gnss_use_2d_solver: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, Clone)]
|
||||
#[serde(default)]
|
||||
pub struct Response {
|
||||
pub result: Option<LocationResult>,
|
||||
pub errors: Vec<String>,
|
||||
pub warnings: Vec<String>,
|
||||
}
|
||||
|
||||
impl Response {
|
||||
fn into_location(self, source: common::LocationSource) -> Result<common::Location, Error> {
|
||||
if !self.errors.is_empty() {
|
||||
return Err(Error::AnyhowError(anyhow!(
|
||||
"api returned errors: {}",
|
||||
self.errors.join(", ")
|
||||
)));
|
||||
}
|
||||
|
||||
if let Some(loc) = &self.result {
|
||||
return Ok(common::Location {
|
||||
latitude: loc.latitude,
|
||||
longitude: loc.longitude,
|
||||
altitude: loc.altitude,
|
||||
source: source.into(),
|
||||
accuracy: loc.accuracy.unwrap_or_default() as f32,
|
||||
});
|
||||
}
|
||||
|
||||
Err(Error::NoLocation)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, Clone)]
|
||||
#[serde(default)]
|
||||
pub struct V3Response {
|
||||
pub result: Option<LocationSolverResult>,
|
||||
pub errors: Vec<String>,
|
||||
pub warnings: Vec<String>,
|
||||
}
|
||||
|
||||
impl V3Response {
|
||||
fn into_location(self, source: common::LocationSource) -> Result<common::Location, Error> {
|
||||
if !self.errors.is_empty() {
|
||||
return Err(Error::AnyhowError(anyhow!(
|
||||
"api returned errors: {}",
|
||||
self.errors.join(", ")
|
||||
)));
|
||||
}
|
||||
|
||||
if let Some(loc) = &self.result {
|
||||
if loc.llh.len() != 3 {
|
||||
return Err(Error::AnyhowError(anyhow!("LLH must contain 3 items")));
|
||||
}
|
||||
|
||||
return Ok(common::Location {
|
||||
latitude: loc.llh[0],
|
||||
longitude: loc.llh[1],
|
||||
altitude: loc.llh[2],
|
||||
source: source.into(),
|
||||
accuracy: loc.accuracy as f32,
|
||||
});
|
||||
}
|
||||
|
||||
Err(Error::NoLocation)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct LocationResult {
|
||||
pub latitude: f64,
|
||||
pub longitude: f64,
|
||||
pub altitude: f64,
|
||||
pub accuracy: Option<f64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct LocationSolverResult {
|
||||
pub llh: Vec<f64>,
|
||||
pub accuracy: f64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkTdoa {
|
||||
#[serde(rename = "gatewayId")]
|
||||
pub gateway_id: String,
|
||||
pub rssi: f64,
|
||||
pub snr: f32,
|
||||
pub toa: u32,
|
||||
#[serde(rename = "antennaId")]
|
||||
pub antenna_id: u32,
|
||||
#[serde(rename = "antennaLocation")]
|
||||
pub antenna_location: AntennaLocation,
|
||||
}
|
||||
|
||||
impl UplinkTdoa {
|
||||
pub fn new(rx_info: &gw::UplinkRxInfo) -> Self {
|
||||
UplinkTdoa {
|
||||
gateway_id: hex::encode(&rx_info.gateway_id),
|
||||
rssi: rx_info.rssi.into(),
|
||||
snr: rx_info.snr,
|
||||
antenna_id: rx_info.antenna,
|
||||
antenna_location: match &rx_info.location {
|
||||
Some(loc) => AntennaLocation {
|
||||
latitude: loc.latitude,
|
||||
longitude: loc.longitude,
|
||||
altitude: loc.altitude,
|
||||
},
|
||||
None => AntennaLocation {
|
||||
latitude: 0.0,
|
||||
longitude: 0.0,
|
||||
altitude: 0.0,
|
||||
},
|
||||
},
|
||||
toa: match &rx_info.fine_time_since_gps_epoch {
|
||||
Some(v) => v.nanos as u32,
|
||||
None => 0,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkRssi {
|
||||
#[serde(rename = "gatewayId")]
|
||||
pub gateway_id: String,
|
||||
pub rssi: f64,
|
||||
pub snr: f32,
|
||||
#[serde(rename = "antennaId")]
|
||||
pub antenna_id: u32,
|
||||
#[serde(rename = "antennaLocation")]
|
||||
pub antenna_location: AntennaLocation,
|
||||
}
|
||||
|
||||
impl UplinkRssi {
|
||||
pub fn new(rx_info: &gw::UplinkRxInfo) -> Self {
|
||||
UplinkRssi {
|
||||
gateway_id: hex::encode(&rx_info.gateway_id),
|
||||
rssi: rx_info.rssi.into(),
|
||||
snr: rx_info.snr,
|
||||
antenna_id: rx_info.antenna,
|
||||
antenna_location: match &rx_info.location {
|
||||
Some(loc) => AntennaLocation {
|
||||
latitude: loc.latitude,
|
||||
longitude: loc.longitude,
|
||||
altitude: loc.altitude,
|
||||
},
|
||||
None => AntennaLocation {
|
||||
latitude: 0.0,
|
||||
longitude: 0.0,
|
||||
altitude: 0.0,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Default)]
|
||||
pub struct WifiAccessPoint {
|
||||
#[serde(rename = "macAddress")]
|
||||
pub mac_address: String,
|
||||
#[serde(rename = "signalStrength")]
|
||||
pub signal_strength: isize,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct AntennaLocation {
|
||||
pub latitude: f64,
|
||||
pub longitude: f64,
|
||||
pub altitude: f64,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum UplinkMsg {
|
||||
UpDf(UplinkMsgUpDf),
|
||||
Gnss(UplinkMsgGnss),
|
||||
Wifi(UplinkMsgWifi),
|
||||
Joining(UplinkMsgJoining),
|
||||
}
|
||||
|
||||
impl Serialize for UplinkMsg {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match self {
|
||||
UplinkMsg::UpDf(v) => v.serialize(serializer),
|
||||
UplinkMsg::Gnss(v) => v.serialize(serializer),
|
||||
UplinkMsg::Wifi(v) => v.serialize(serializer),
|
||||
UplinkMsg::Joining(v) => v.serialize(serializer),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Eui64Wrapper(EUI64);
|
||||
|
||||
impl Eui64Wrapper {
|
||||
pub fn new(eui64: &EUI64) -> Self {
|
||||
Eui64Wrapper(*eui64)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Eui64Wrapper {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut parts: Vec<String> = Vec::new();
|
||||
for b in &self.0.to_be_bytes() {
|
||||
parts.push(hex::encode(vec![*b]));
|
||||
}
|
||||
|
||||
serializer.serialize_str(&parts.join("-"))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Eui64Wrapper {
|
||||
fn deserialize<D>(deserialize: D) -> Result<Eui64Wrapper, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
deserialize.deserialize_str(Eui64WrapperVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
struct Eui64WrapperVisitor;
|
||||
|
||||
impl Visitor<'_> for Eui64WrapperVisitor {
|
||||
type Value = Eui64Wrapper;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
|
||||
formatter.write_str("An EUI64 in the format of 01-02-03-04-05-06-07-08 is expected")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: de::Error,
|
||||
{
|
||||
let s = value.to_string().replace('-', "");
|
||||
let b = hex::decode(s).map_err(|e| E::custom(format!("{}", e)))?;
|
||||
let eui64 = EUI64::from_slice(&b).map_err(|e| E::custom(format!("{}", e)))?;
|
||||
Ok(Eui64Wrapper(eui64))
|
||||
}
|
||||
}
|
||||
|
||||
// UplinkMsgUpDf implements the LoRa Cloud UplinkMsg object.
|
||||
#[derive(Default, Serialize, Clone)]
|
||||
pub struct UplinkMsgUpDf {
|
||||
#[serde(rename = "msgtype")]
|
||||
pub msg_type: String, // must be set to "updf"
|
||||
#[serde(rename = "fcnt")]
|
||||
pub f_cnt: u32,
|
||||
pub port: u8,
|
||||
pub dr: u8,
|
||||
pub freq: u32,
|
||||
pub timestamp: f64, // seconds since UTC
|
||||
pub payload: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_capture_time: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_capture_time_accuracy: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_assist_position: Option<Vec<f64>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_assist_altitude: Option<f64>,
|
||||
}
|
||||
|
||||
// UplinkMsgGnss implements the LoRa Cloud UplinkMsg object containing a gnss payload.
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkMsgGnss {
|
||||
#[serde(rename = "msgtype")]
|
||||
pub msg_type: String, // must be set to "GNSS"
|
||||
pub payload: String, // HEX format
|
||||
pub timestamp: f64, // seconds since UTC
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_capture_time: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_capture_time_accuracy: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_assist_position: Option<Vec<f64>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub gnss_assist_altitude: Option<f64>,
|
||||
}
|
||||
|
||||
// UplinkMsgWifi implements the LoRa Cloud UplinkMsg object containing a wifi payload.
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkMsgWifi {
|
||||
#[serde(rename = "msgtype")]
|
||||
pub msg_type: String, // must be set to "wifi"
|
||||
pub payload: String, // HEX
|
||||
pub timestamp: f64, // seconds since UTC
|
||||
}
|
||||
|
||||
// UplinkMsgJoining implements the LoRa Cloud UplinkMsg object indicating a session reset.
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkMsgJoining {
|
||||
#[serde(rename = "msgtype")]
|
||||
pub msg_type: String, // must be set to "joining"
|
||||
pub timestamp: f64, // seconds since UTC
|
||||
}
|
||||
|
||||
// UplinkResponse holds the response for a single DevEUI.
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
#[serde(default)]
|
||||
pub struct UplinkResponse {
|
||||
pub result: UplinkResponseResult,
|
||||
pub error: String,
|
||||
}
|
||||
|
||||
// UplinkResponseResult holds the response result.
|
||||
#[derive(Serialize, Deserialize, Clone, Default)]
|
||||
#[serde(default)]
|
||||
pub struct UplinkResponseResult {
|
||||
pub file: serde_json::Value,
|
||||
pub stream_records: Option<StreamUpdate>,
|
||||
pub position_solution: Option<PositionSolution>,
|
||||
pub fulfilled_requests: serde_json::Value,
|
||||
#[serde(rename = "fports")]
|
||||
pub f_ports: serde_json::Value,
|
||||
pub info_fields: serde_json::Value,
|
||||
pub pending_requests: serde_json::Value,
|
||||
pub log_messages: serde_json::Value,
|
||||
#[serde(rename = "dnlink")]
|
||||
pub downlink: Option<LoraDownlink>,
|
||||
}
|
||||
|
||||
// StreamUpdate lists both the signals and the fully-assembled streaming records that are received by the decoder.
|
||||
// Each entry denotes an assembled packet with application data and record offset.
|
||||
pub type StreamUpdate = Vec<Vec<serde_json::Value>>;
|
||||
|
||||
// LoRaDownlink implements the LoRa Cloud LoRaDownlink object.
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct LoraDownlink {
|
||||
pub port: u8,
|
||||
pub payload: String, // HEX
|
||||
}
|
||||
|
||||
// PositionSolution implements the Position Solution object.
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct PositionSolution {
|
||||
pub algorithm_type: Option<String>,
|
||||
pub ecef: Option<Vec<f64>>,
|
||||
pub llh: Vec<f64>,
|
||||
pub capture_time_gps: Option<f64>,
|
||||
pub gdop: Option<f64>,
|
||||
pub accuracy: Option<f32>,
|
||||
pub timestamp: f64,
|
||||
}
|
||||
|
||||
// UplinkRequest implements the LoRa Cloud uplink/send request.
|
||||
#[derive(Serialize, Clone)]
|
||||
pub struct UplinkRequest {
|
||||
#[serde(rename = "deveui")]
|
||||
pub dev_eui: Eui64Wrapper,
|
||||
pub uplink: UplinkMsg,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_uplink_request_serizalization() {
|
||||
let updf = UplinkRequest {
|
||||
dev_eui: Eui64Wrapper::new(&EUI64::from_be_bytes([1, 2, 3, 4, 5, 6, 7, 8])),
|
||||
uplink: UplinkMsg::UpDf(UplinkMsgUpDf {
|
||||
msg_type: "updf".into(),
|
||||
f_cnt: 10,
|
||||
port: 2,
|
||||
dr: 1,
|
||||
freq: 868100000,
|
||||
timestamp: 12345.0,
|
||||
payload: "".into(),
|
||||
..Default::default()
|
||||
}),
|
||||
};
|
||||
let json_s = serde_json::to_string(&updf).unwrap();
|
||||
|
||||
assert_eq!("{\"deveui\":\"01-02-03-04-05-06-07-08\",\"uplink\":{\"msgtype\":\"updf\",\"fcnt\":10,\"port\":2,\"dr\":1,\"freq\":868100000,\"timestamp\":12345.0,\"payload\":\"\"}}", json_s);
|
||||
}
|
||||
}
|
@@ -1,58 +0,0 @@
|
||||
pub fn serde_json_to_pb_json(val: &serde_json::Value) -> pbjson_types::Struct {
|
||||
// Initial value must be an object.
|
||||
if let serde_json::Value::Object(_) = val {
|
||||
if let Some(pbjson_types::value::Kind::StructValue(v)) = _serde_json_to_pb_json(val) {
|
||||
return v;
|
||||
}
|
||||
}
|
||||
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn _serde_json_to_pb_json(val: &serde_json::Value) -> Option<pbjson_types::value::Kind> {
|
||||
match val {
|
||||
serde_json::Value::Null => None,
|
||||
serde_json::Value::Bool(v) => Some(pbjson_types::value::Kind::BoolValue(*v)),
|
||||
serde_json::Value::Number(v) => {
|
||||
if v.is_f64() {
|
||||
Some(pbjson_types::value::Kind::NumberValue(v.as_f64().unwrap()))
|
||||
} else if v.is_i64() {
|
||||
Some(pbjson_types::value::Kind::NumberValue(
|
||||
v.as_i64().unwrap() as f64
|
||||
))
|
||||
} else if v.is_u64() {
|
||||
Some(pbjson_types::value::Kind::NumberValue(
|
||||
v.as_u64().unwrap() as f64
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
serde_json::Value::String(v) => Some(pbjson_types::value::Kind::StringValue(v.clone())),
|
||||
serde_json::Value::Array(v) => Some(pbjson_types::value::Kind::ListValue(
|
||||
pbjson_types::ListValue {
|
||||
values: v
|
||||
.iter()
|
||||
.map(|v| pbjson_types::Value {
|
||||
kind: _serde_json_to_pb_json(v),
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
)),
|
||||
serde_json::Value::Object(v) => Some(pbjson_types::value::Kind::StructValue(
|
||||
pbjson_types::Struct {
|
||||
fields: v
|
||||
.iter()
|
||||
.map(|(k, v)| {
|
||||
(
|
||||
k.clone(),
|
||||
pbjson_types::Value {
|
||||
kind: _serde_json_to_pb_json(v),
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
@@ -1,792 +0,0 @@
|
||||
use std::collections::HashMap;
|
||||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use async_recursion::async_recursion;
|
||||
use async_trait::async_trait;
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use tracing::{info, trace, warn};
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::Integration as IntegrationTrait;
|
||||
use crate::gpstime::ToGpsTime;
|
||||
use crate::helpers::errors::PrintFullError;
|
||||
use crate::integration::{integration_event, location_event};
|
||||
use crate::storage::application::LoraCloudConfiguration;
|
||||
use crate::storage::device_queue;
|
||||
use crate::uplink::helpers::{get_start_location, get_time_since_gps_epoch_chrono};
|
||||
use chirpstack_api::{common, gw, integration};
|
||||
use lrwn::EUI64;
|
||||
|
||||
mod buffer;
|
||||
mod client;
|
||||
mod convert;
|
||||
|
||||
pub struct Integration {
|
||||
client: client::ApiClient,
|
||||
config: LoraCloudConfiguration,
|
||||
}
|
||||
|
||||
impl Integration {
|
||||
pub fn new(conf: &LoraCloudConfiguration) -> Integration {
|
||||
trace!("Initializing LoRa Cloud integration");
|
||||
|
||||
Integration {
|
||||
client: client::ApiClient::new(
|
||||
"https://mgs.loracloud.com",
|
||||
&conf.modem_geolocation_services.token,
|
||||
),
|
||||
config: conf.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn modem_joining(&self, pl: &integration::JoinEvent) -> Result<()> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
|
||||
info!(dev_eui = %di.dev_eui, "Forwarding join notification");
|
||||
let ts: DateTime<Utc> = (*pl.time.as_ref().unwrap())
|
||||
.try_into()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
|
||||
let pl = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::Joining(client::UplinkMsgJoining {
|
||||
msg_type: "joining".into(),
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
}),
|
||||
};
|
||||
|
||||
let _ = self.client.uplink_send(&pl).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn modem_updf(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<()> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
|
||||
info!(dev_eui = %di.dev_eui, "Forwarding updf message");
|
||||
let ts: DateTime<Utc> = (*pl.time.as_ref().unwrap())
|
||||
.try_into()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
|
||||
let req = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::UpDf({
|
||||
let mut msg_updf = client::UplinkMsgUpDf {
|
||||
msg_type: "updf".into(),
|
||||
f_cnt: pl.f_cnt,
|
||||
port: pl.f_port as u8,
|
||||
dr: pl.dr as u8,
|
||||
freq: pl.tx_info.as_ref().unwrap().frequency,
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
payload: hex::encode(&pl.data),
|
||||
gnss_capture_time: match self.config.modem_geolocation_services.gnss_use_rx_time
|
||||
{
|
||||
false => None,
|
||||
true => {
|
||||
let ts = match get_time_since_gps_epoch_chrono(&pl.rx_info) {
|
||||
Some(v) => v,
|
||||
None => Utc::now().to_gps_time(),
|
||||
};
|
||||
|
||||
// Compensate for gnss scanning time and uplink.
|
||||
let ts = ts - Duration::try_seconds(6).unwrap();
|
||||
Some(ts.num_seconds() as f64)
|
||||
}
|
||||
},
|
||||
gnss_capture_time_accuracy: match self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.gnss_use_rx_time
|
||||
{
|
||||
false => None,
|
||||
true => Some(15.0),
|
||||
},
|
||||
gnss_assist_position: None,
|
||||
gnss_assist_altitude: None,
|
||||
};
|
||||
|
||||
if self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.gnss_use_gateway_location
|
||||
{
|
||||
if let Some(loc) = get_start_location(&pl.rx_info) {
|
||||
msg_updf.gnss_assist_position = Some(vec![loc.latitude, loc.longitude]);
|
||||
msg_updf.gnss_assist_altitude = Some(loc.altitude);
|
||||
}
|
||||
}
|
||||
|
||||
msg_updf
|
||||
}),
|
||||
};
|
||||
|
||||
let resp = self.client.uplink_send(&req).await?;
|
||||
|
||||
self.handle_modem_response(vars, pl, &resp, common::LocationSource::GeoResolverGnss)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn modem_metadata(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<()> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
info!(dev_eui = %di.dev_eui, "Forwarding uplink meta-data");
|
||||
let ts: DateTime<Utc> = (*pl.time.as_ref().unwrap())
|
||||
.try_into()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
|
||||
let req = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::UpDf(client::UplinkMsgUpDf {
|
||||
msg_type: "updf".into(),
|
||||
f_cnt: pl.f_cnt,
|
||||
port: pl.f_port as u8,
|
||||
dr: pl.dr as u8,
|
||||
freq: pl.tx_info.as_ref().unwrap().frequency,
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
payload: "".into(),
|
||||
..Default::default()
|
||||
}),
|
||||
};
|
||||
|
||||
let resp = self.client.uplink_send(&req).await?;
|
||||
self.handle_modem_response(vars, pl, &resp, common::LocationSource::Unknown)
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[async_recursion]
|
||||
async fn handle_modem_response(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
resp: &client::UplinkResponse,
|
||||
loc_source: common::LocationSource,
|
||||
) -> Result<()> {
|
||||
trace!("Handling modem uplink response");
|
||||
if !resp.error.is_empty() {
|
||||
return Err(anyhow!("{}", resp.error));
|
||||
}
|
||||
|
||||
self.handle_response_integration_event(vars, pl, &resp.result)
|
||||
.await?;
|
||||
|
||||
if self.config.modem_geolocation_services.parse_tlv && resp.result.stream_records.is_some()
|
||||
{
|
||||
self.handle_response_tlv_records(
|
||||
vars,
|
||||
pl,
|
||||
resp.result.stream_records.as_ref().unwrap(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if let Some(v) = &resp.result.downlink {
|
||||
self.handle_response_downlink(pl, v).await?;
|
||||
}
|
||||
|
||||
if let Some(v) = &resp.result.position_solution {
|
||||
self.handle_response_position(vars, pl, v, loc_source)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_response_tlv_records(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
stream: &[Vec<serde_json::Value>],
|
||||
) -> Result<()> {
|
||||
trace!("Handling TLV records");
|
||||
|
||||
let mut payloads: Vec<Vec<u8>> = Vec::new();
|
||||
|
||||
// Parse all payloads from the stream.
|
||||
for record in stream {
|
||||
// Sanity check, as 0 = index, 1 = payload
|
||||
if record.len() != 2 {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let serde_json::Value::String(record_pl) = &record[1] {
|
||||
let record_b = hex::decode(record_pl).context("Decode stream payload")?;
|
||||
payloads.push(record_b);
|
||||
}
|
||||
}
|
||||
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
let ts: DateTime<Utc> = (*pl.time.as_ref().unwrap())
|
||||
.try_into()
|
||||
.map_err(anyhow::Error::msg)?;
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
|
||||
for p in &payloads {
|
||||
let mut index = 0;
|
||||
// There must be at least 2 bytes to read (tag + length)
|
||||
while p.len() - index >= 2 {
|
||||
// Tag
|
||||
let t = p[index];
|
||||
// Length
|
||||
let l = p[index + 1] as usize;
|
||||
|
||||
// Validate that we can at least read 'l' data
|
||||
if p.len() - index - 2 < l {
|
||||
return Err(anyhow!("Invalid TLV record"));
|
||||
}
|
||||
|
||||
// Get v
|
||||
let v = &p[index + 2..index + 2 + l];
|
||||
|
||||
// increment index (2 bytes for t and l bytes + length of v)
|
||||
index = index + 2 + l;
|
||||
|
||||
match t {
|
||||
// GNSS
|
||||
0x06 | 0x07 => {
|
||||
let mut msg_gnss = client::UplinkMsgGnss {
|
||||
msg_type: "gnss".into(),
|
||||
payload: hex::encode(v),
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
gnss_capture_time: None,
|
||||
gnss_capture_time_accuracy: None,
|
||||
gnss_assist_position: None,
|
||||
gnss_assist_altitude: None,
|
||||
};
|
||||
|
||||
// Note: we must rely on the embedded gnss timestamp, as the frame
|
||||
// is de-fragmented and we can not assume the scan time from the
|
||||
// rx timestamp.
|
||||
|
||||
if let Some(loc) = get_start_location(&pl.rx_info) {
|
||||
msg_gnss.gnss_assist_position = Some(vec![loc.latitude, loc.longitude]);
|
||||
msg_gnss.gnss_assist_altitude = Some(loc.altitude);
|
||||
}
|
||||
|
||||
let req = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::Gnss(msg_gnss),
|
||||
};
|
||||
let resp = self.client.uplink_send(&req).await?;
|
||||
self.handle_modem_response(
|
||||
vars,
|
||||
pl,
|
||||
&resp,
|
||||
common::LocationSource::GeoResolverGnss,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
// Wifi (legacy)
|
||||
0x08 => {
|
||||
let mut vv = vec![0x01];
|
||||
vv.extend_from_slice(v);
|
||||
|
||||
let req = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::Wifi(client::UplinkMsgWifi {
|
||||
msg_type: "wifi".into(),
|
||||
payload: hex::encode(vv),
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
}),
|
||||
};
|
||||
let resp = self.client.uplink_send(&req).await?;
|
||||
self.handle_modem_response(
|
||||
vars,
|
||||
pl,
|
||||
&resp,
|
||||
common::LocationSource::GeoResolverWifi,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
// Wifi
|
||||
0x0e => {
|
||||
// we have to skip first 5 bytes
|
||||
if v.len() < 5 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut vv = vec![0x01];
|
||||
vv.extend_from_slice(&v[5..]);
|
||||
|
||||
let req = client::UplinkRequest {
|
||||
dev_eui: client::Eui64Wrapper::new(&dev_eui),
|
||||
uplink: client::UplinkMsg::Wifi(client::UplinkMsgWifi {
|
||||
msg_type: "wifi".into(),
|
||||
payload: hex::encode(vv),
|
||||
timestamp: ts.timestamp_millis() as f64 / 1000.0,
|
||||
}),
|
||||
};
|
||||
let resp = self.client.uplink_send(&req).await?;
|
||||
self.handle_modem_response(
|
||||
vars,
|
||||
pl,
|
||||
&resp,
|
||||
common::LocationSource::GeoResolverWifi,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
_ => {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_response_integration_event(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
result: &client::UplinkResponseResult,
|
||||
) -> Result<()> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
trace!(dev_eui = %di.dev_eui, "Handling response integration event");
|
||||
|
||||
let int_pl = integration::IntegrationEvent {
|
||||
deduplication_id: pl.deduplication_id.clone(),
|
||||
device_info: pl.device_info.clone(),
|
||||
time: Some(Utc::now().into()),
|
||||
integration_name: "loracloud".into(),
|
||||
event_type: "modem_UplinkResponse".into(),
|
||||
object: Some(convert::serde_json_to_pb_json(&serde_json::to_value(
|
||||
result,
|
||||
)?)),
|
||||
};
|
||||
|
||||
integration_event(Uuid::from_str(&di.application_id)?, vars, &int_pl).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_response_downlink(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
result: &client::LoraDownlink,
|
||||
) -> Result<()> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
|
||||
trace!(dev_eui = %di.dev_eui, "Handling downlink enqueue");
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
|
||||
let _ = device_queue::enqueue_item(device_queue::DeviceQueueItem {
|
||||
dev_eui,
|
||||
f_port: match result.port {
|
||||
0 => 150,
|
||||
_ => result.port,
|
||||
} as i16,
|
||||
data: hex::decode(&result.payload)?,
|
||||
..Default::default()
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_response_position(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
result: &client::PositionSolution,
|
||||
source: common::LocationSource,
|
||||
) -> Result<()> {
|
||||
if result.llh.len() != 3 {
|
||||
return Err(anyhow!("llh must contain exactly 3 items"));
|
||||
}
|
||||
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
|
||||
let loc_pl = integration::LocationEvent {
|
||||
deduplication_id: pl.deduplication_id.clone(),
|
||||
device_info: pl.device_info.clone(),
|
||||
time: Some(Utc::now().into()),
|
||||
location: Some(common::Location {
|
||||
latitude: result.llh[0],
|
||||
longitude: result.llh[1],
|
||||
altitude: result.llh[2],
|
||||
source: source.into(),
|
||||
accuracy: result.accuracy.unwrap_or_default(),
|
||||
}),
|
||||
};
|
||||
|
||||
location_event(Uuid::from_str(&di.application_id)?, vars, &loc_pl).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn update_geoloc_buffer(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<Vec<Vec<gw::UplinkRxInfo>>> {
|
||||
// Do not trigger geolocation if there are less than 3 gateways.
|
||||
if pl.rx_info.len() < 3 {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
let dev_eui = EUI64::from_str(&di.dev_eui)?;
|
||||
let ttl = Duration::try_seconds(
|
||||
self.config
|
||||
.modem_geolocation_services
|
||||
.geolocation_buffer_ttl as i64,
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut buf = vec![pl.rx_info.clone()];
|
||||
buf.extend_from_slice(&buffer::get_geoloc_buffer(&dev_eui, ttl).await?);
|
||||
buf.truncate(
|
||||
(self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.geolocation_min_buffer_size
|
||||
+ 1) as usize, // buffer + current uplink
|
||||
);
|
||||
|
||||
buffer::save_geoloc_buffer(&dev_eui, &ttl, &buf).await?;
|
||||
|
||||
Ok(buf)
|
||||
}
|
||||
|
||||
async fn handle_geolocation(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
buffer: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<Option<common::Location>> {
|
||||
if self.config.modem_geolocation_services.geolocation_gnss {
|
||||
let loc = self.handle_geolocation_gnss(pl).await?;
|
||||
if loc.is_some() {
|
||||
return Ok(loc);
|
||||
}
|
||||
}
|
||||
|
||||
if self.config.modem_geolocation_services.geolocation_wifi {
|
||||
let loc = self.handle_geolocation_wifi(pl).await?;
|
||||
if loc.is_some() {
|
||||
return Ok(loc);
|
||||
}
|
||||
}
|
||||
|
||||
if self.config.modem_geolocation_services.geolocation_tdoa {
|
||||
let loc = self.handle_geolocation_tdoa(pl, buffer).await?;
|
||||
if loc.is_some() {
|
||||
return Ok(loc);
|
||||
}
|
||||
}
|
||||
|
||||
if self.config.modem_geolocation_services.geolocation_rssi {
|
||||
let loc = self.handle_geolocation_rssi(pl, buffer).await?;
|
||||
if loc.is_some() {
|
||||
return Ok(loc);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
async fn handle_geolocation_gnss(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<Option<common::Location>> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
trace!(dev_eui = %di.dev_eui, "Trying GNSS geolocation");
|
||||
|
||||
let gnss_pl_str: String = match &pl.object {
|
||||
None => {
|
||||
// object is not set
|
||||
return Ok(None);
|
||||
}
|
||||
Some(v) => {
|
||||
// retrieve gnss payload field.
|
||||
let field = match v.fields.get(
|
||||
&self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.geolocation_gnss_payload_field,
|
||||
) {
|
||||
None => {
|
||||
// object does not contain gnss payload field
|
||||
return Ok(None);
|
||||
}
|
||||
Some(v) => v,
|
||||
};
|
||||
|
||||
// Try to cast the field to String.
|
||||
if let Some(pbjson_types::value::Kind::StringValue(v)) = &field.kind {
|
||||
v.to_string()
|
||||
} else {
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let b = hex::decode(&gnss_pl_str).context("Decode GNSS payload field")?;
|
||||
let resp = self
|
||||
.client
|
||||
.gnss_lr1110_single_frame(
|
||||
&pl.rx_info,
|
||||
self.config
|
||||
.modem_geolocation_services
|
||||
.geolocation_gnss_use_rx_time,
|
||||
&b,
|
||||
)
|
||||
.await;
|
||||
|
||||
match resp {
|
||||
Ok(v) => Ok(Some(v)),
|
||||
Err(e) => {
|
||||
warn!(error = %e.full(), "GNSS geolocation failed");
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_geolocation_wifi(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<Option<common::Location>> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
trace!(dev_eui = %di.dev_eui, "Trying wifi geolocation");
|
||||
|
||||
let wifi_aps: Vec<client::WifiAccessPoint> = match &pl.object {
|
||||
None => {
|
||||
// object is not set
|
||||
return Ok(None);
|
||||
}
|
||||
Some(v) => {
|
||||
// retrieve wifi payload field.
|
||||
let field = match v.fields.get(
|
||||
&self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.geolocation_wifi_payload_field,
|
||||
) {
|
||||
None => {
|
||||
// object does not contain wifi payload field.
|
||||
return Ok(None);
|
||||
}
|
||||
Some(v) => v,
|
||||
};
|
||||
|
||||
// Try to cast the field to ListValue.
|
||||
let ap_list: &Vec<pbjson_types::Value> =
|
||||
if let Some(pbjson_types::value::Kind::ListValue(v)) = &field.kind {
|
||||
&v.values
|
||||
} else {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
// Cast ListValue to Vec<WifiAccessPoint>
|
||||
let ap_list: Vec<client::WifiAccessPoint> = ap_list
|
||||
.iter()
|
||||
.map(|v| {
|
||||
if let Some(pbjson_types::value::Kind::StructValue(v)) = &v.kind {
|
||||
let mut ap: client::WifiAccessPoint = Default::default();
|
||||
|
||||
if let Some(field) = v.fields.get("macAddress") {
|
||||
if let Some(pbjson_types::value::Kind::StringValue(mac)) =
|
||||
&field.kind
|
||||
{
|
||||
ap.mac_address = mac.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(field) = v.fields.get("signalStrength") {
|
||||
if let Some(pbjson_types::value::Kind::NumberValue(sig)) =
|
||||
&field.kind
|
||||
{
|
||||
ap.signal_strength = *sig as isize;
|
||||
}
|
||||
}
|
||||
|
||||
ap
|
||||
} else {
|
||||
Default::default()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
ap_list
|
||||
}
|
||||
};
|
||||
|
||||
let resp = self
|
||||
.client
|
||||
.wifi_tdoa_single_frame(&pl.rx_info, &wifi_aps)
|
||||
.await;
|
||||
match resp {
|
||||
Ok(v) => Ok(Some(v)),
|
||||
Err(e) => {
|
||||
warn!(error = %e.full(), "Wifi geolocation failed");
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_geolocation_tdoa(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
buffer: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<Option<common::Location>> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
trace!(dev_eui = %di.dev_eui, "Trying TDOA geolocation");
|
||||
|
||||
let resp = if buffer.len() == 1 {
|
||||
self.client.tdoa_single_frame(&buffer[0]).await
|
||||
} else {
|
||||
self.client.tdoa_multi_frame(buffer).await
|
||||
};
|
||||
|
||||
match resp {
|
||||
Ok(v) => Ok(Some(v)),
|
||||
Err(e) => {
|
||||
warn!(error = %e.full(), "TDOA geolocation failed");
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_geolocation_rssi(
|
||||
&self,
|
||||
pl: &integration::UplinkEvent,
|
||||
buffer: &[Vec<gw::UplinkRxInfo>],
|
||||
) -> Result<Option<common::Location>> {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
trace!(dev_eui = %di.dev_eui, "Trying RSSI geolocation");
|
||||
|
||||
let resp = if buffer.len() == 1 {
|
||||
self.client.rssi_single_frame(&buffer[0]).await
|
||||
} else {
|
||||
self.client.rssi_multi_frame(buffer).await
|
||||
};
|
||||
|
||||
match resp {
|
||||
Ok(v) => Ok(Some(v)),
|
||||
Err(e) => {
|
||||
warn!(error = %e.full(), "RSSI geolocation failed");
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl IntegrationTrait for Integration {
|
||||
async fn uplink_event(
|
||||
&self,
|
||||
vars: &HashMap<String, String>,
|
||||
pl: &integration::UplinkEvent,
|
||||
) -> Result<()> {
|
||||
if self.config.modem_geolocation_services.modem_enabled {
|
||||
if self
|
||||
.config
|
||||
.modem_geolocation_services
|
||||
.forward_f_ports
|
||||
.contains(&pl.f_port)
|
||||
{
|
||||
self.modem_updf(vars, pl).await?;
|
||||
} else {
|
||||
// Only forward meta-data.
|
||||
self.modem_metadata(vars, pl).await?;
|
||||
}
|
||||
}
|
||||
|
||||
// In case of TDOA or RSSI, add the rx-info to the geolocation buffer.
|
||||
let geoloc_buffer: Vec<Vec<gw::UplinkRxInfo>> =
|
||||
if self.config.modem_geolocation_services.geolocation_tdoa
|
||||
|| self.config.modem_geolocation_services.geolocation_rssi
|
||||
{
|
||||
self.update_geoloc_buffer(pl).await?
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
// In case geolocation is disabled, this returns None.
|
||||
let loc = self.handle_geolocation(pl, &geoloc_buffer).await?;
|
||||
if let Some(v) = loc {
|
||||
let di = pl.device_info.as_ref().unwrap();
|
||||
let loc_pl = integration::LocationEvent {
|
||||
deduplication_id: pl.deduplication_id.clone(),
|
||||
time: Some(Utc::now().into()),
|
||||
device_info: pl.device_info.clone(),
|
||||
location: Some(v),
|
||||
};
|
||||
|
||||
location_event(Uuid::from_str(&di.application_id)?, vars, &loc_pl).await;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn join_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
pl: &integration::JoinEvent,
|
||||
) -> Result<()> {
|
||||
if self.config.modem_geolocation_services.modem_enabled {
|
||||
self.modem_joining(pl).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn ack_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::AckEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn txack_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::TxAckEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn log_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::LogEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn status_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::StatusEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn location_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::LocationEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn integration_event(
|
||||
&self,
|
||||
_vars: &HashMap<String, String>,
|
||||
_pl: &integration::IntegrationEvent,
|
||||
) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
@@ -18,12 +18,12 @@ use lrwn::EUI64;
mod amqp;
mod aws_sns;
mod azure_service_bus;
mod blynk;
mod gcp_pub_sub;
mod http;
mod ifttt;
mod influxdb;
mod kafka;
mod loracloud;
#[cfg(test)]
pub mod mock;
mod mqtt;

@@ -167,9 +167,6 @@ async fn for_application_id(id: Uuid) -> Result<Vec<Box<dyn Integration + Sync +
            application::IntegrationConfiguration::InfluxDb(conf) => {
                Box::new(influxdb::Integration::new(conf)?)
            }
            application::IntegrationConfiguration::LoraCloud(conf) => {
                Box::new(loracloud::Integration::new(conf))
            }
            application::IntegrationConfiguration::MyDevices(conf) => {
                Box::new(mydevices::Integration::new(conf))
            }

@@ -182,6 +179,9 @@ async fn for_application_id(id: Uuid) -> Result<Vec<Box<dyn Integration + Sync +
            application::IntegrationConfiguration::Ifttt(conf) => {
                Box::new(ifttt::Integration::new(conf))
            }
            application::IntegrationConfiguration::Blynk(conf) => {
                Box::new(blynk::Integration::new(conf))
            }
            _ => {
                continue;
            }

@@ -543,10 +543,12 @@ async fn handle_down_command(application_id: String, pl: integration::DownlinkCo
    }

    let mut data = pl.data.clone();
    let mut f_port = pl.f_port as u8;

    if let Some(obj) = &pl.object {
        let dp = device_profile::get(&dev.device_profile_id).await?;

        data = codec::struct_to_binary(
        (f_port, data) = codec::struct_to_binary(
            dp.payload_codec_runtime,
            pl.f_port as u8,
            &dev.variables,

@@ -561,10 +563,19 @@ async fn handle_down_command(application_id: String, pl: integration::DownlinkCo
            true => Uuid::new_v4().into(),
            false => Uuid::from_str(&pl.id)?.into(),
        },
        f_port: pl.f_port as i16,
        f_port: f_port as i16,
        confirmed: pl.confirmed,
        data,
        dev_eui,
        expires_at: if let Some(expires_at) = pl.expires_at {
            Some(
                expires_at
                    .try_into()
                    .map_err(|e| anyhow!("Parse expires_at error: {}", e))?,
            )
        } else {
            None
        },
        ..Default::default()
    };

@@ -702,6 +702,7 @@ pub mod test {
            f_port: 10,
            data: vec![1, 2, 3],
            object: None,
            expires_at: None,
        };
        let down_cmd_json = serde_json::to_string(&down_cmd).unwrap();
        client
@@ -79,12 +79,12 @@ pub enum IntegrationKind {
    InfluxDb,
    ThingsBoard,
    MyDevices,
    LoraCloud,
    GcpPubSub,
    AwsSns,
    AzureServiceBus,
    PilotThings,
    Ifttt,
    Blynk,
}

impl fmt::Display for IntegrationKind {

@@ -102,12 +102,12 @@ impl FromStr for IntegrationKind {
            "InfluxDb" => IntegrationKind::InfluxDb,
            "ThingsBoard" => IntegrationKind::ThingsBoard,
            "MyDevices" => IntegrationKind::MyDevices,
            "LoraCloud" => IntegrationKind::LoraCloud,
            "GcpPubSub" => IntegrationKind::GcpPubSub,
            "AwsSns" => IntegrationKind::AwsSns,
            "AzureServiceBus" => IntegrationKind::AzureServiceBus,
            "PilotThings" => IntegrationKind::PilotThings,
            "Ifttt" => IntegrationKind::Ifttt,
            "Blynk" => IntegrationKind::Blynk,
            _ => {
                return Err(anyhow!("Unexpected IntegrationKind: {}", s));
            }

@@ -152,12 +152,12 @@ pub enum IntegrationConfiguration {
    InfluxDb(InfluxDbConfiguration),
    ThingsBoard(ThingsBoardConfiguration),
    MyDevices(MyDevicesConfiguration),
    LoraCloud(LoraCloudConfiguration),
    GcpPubSub(GcpPubSubConfiguration),
    AwsSns(AwsSnsConfiguration),
    AzureServiceBus(AzureServiceBusConfiguration),
    PilotThings(PilotThingsConfiguration),
    Ifttt(IftttConfiguration),
    Blynk(BlynkConfiguration),
}

#[cfg(feature = "postgres")]

@@ -224,33 +224,6 @@ pub struct MyDevicesConfiguration {
    pub endpoint: String,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct LoraCloudConfiguration {
    pub modem_geolocation_services: LoraCloudModemGeolocationServices,
}

#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
pub struct LoraCloudModemGeolocationServices {
    pub token: String,
    pub modem_enabled: bool,
    pub modem_port: u32,
    pub gnss_port: u32,
    pub forward_f_ports: Vec<u32>,
    pub gnss_use_rx_time: bool,
    pub gnss_use_gateway_location: bool,
    pub parse_tlv: bool,
    pub geolocation_buffer_ttl: u32,
    pub geolocation_min_buffer_size: u32,
    pub geolocation_tdoa: bool,
    pub geolocation_rssi: bool,
    pub geolocation_gnss: bool,
    pub geolocation_gnss_payload_field: String,
    pub geolocation_gnss_use_rx_time: bool,
    pub geolocation_wifi: bool,
    pub geolocation_wifi_payload_field: String,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct GcpPubSubConfiguration {
    pub encoding: i32,

@@ -290,6 +263,12 @@ pub struct IftttConfiguration {
    pub event_prefix: String,
}

#[derive(Default, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default)]
pub struct BlynkConfiguration {
    pub token: String,
}

#[derive(Clone, Queryable, Insertable, PartialEq, Eq, Debug)]
#[diesel(table_name = application_integration)]
pub struct Integration {

@@ -468,7 +447,7 @@ pub async fn get_integration(
    application_id: &Uuid,
    kind: IntegrationKind,
) -> Result<Integration, Error> {
    let mut i: Integration = application_integration::dsl::application_integration
    let i: Integration = application_integration::dsl::application_integration
        .filter(
            application_integration::dsl::application_id
                .eq(fields::Uuid::from(application_id))

@@ -478,18 +457,6 @@ pub async fn get_integration(
        .await
        .map_err(|e| Error::from_diesel(e, application_id.to_string()))?;
||||
|
||||
    // For backwards compatibility
    if let IntegrationConfiguration::LoraCloud(conf) = &mut i.configuration {
        if conf.modem_geolocation_services.forward_f_ports.is_empty() {
            conf.modem_geolocation_services.forward_f_ports = vec![
                conf.modem_geolocation_services.modem_port,
                conf.modem_geolocation_services.gnss_port,
                197,
                192,
            ];
        }
    }

    Ok(i)
}
|
||||
|
@@ -1,6 +1,6 @@
module request-log

go 1.18
go 1.23.0

require (
	github.com/chirpstack/chirpstack/api/go/v4 v4.6.0

@@ -12,6 +12,6 @@ require (
	github.com/cespare/xxhash/v2 v2.2.0 // indirect
	github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
	github.com/google/go-cmp v0.5.9 // indirect
	golang.org/x/net v0.36.0 // indirect
	golang.org/x/sys v0.30.0 // indirect
	golang.org/x/net v0.38.0 // indirect
	golang.org/x/sys v0.31.0 // indirect
)
||||
|
@@ -5,19 +5,26 @@ github.com/chirpstack/chirpstack/api/go/v4 v4.6.0/go.mod h1:6+68s1PGHq2QWZ216RTw
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
||||
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||
github.com/go-redis/redis/v8 v8.11.5 h1:AcZZR7igkdvfVmQTPnu9WE37LRrO/YrBH5zWyjDC0oI=
|
||||
github.com/go-redis/redis/v8 v8.11.5/go.mod h1:gREzHqY1hg6oD9ngVRbLStwAWKhA0FEgq8Jd4h5lpwo=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE=
|
||||
github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU=
|
||||
github.com/onsi/gomega v1.18.1 h1:M1GfJqGRrBrrGGsbxzV5dqM2U2ApXefZCQpkukxYRLE=
|
||||
golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA=
|
||||
golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM=
|
||||
github.com/onsi/gomega v1.18.1/go.mod h1:0q+aL8jAiMXy9hbwj2mr5GziHiwhAIQpFmmtT5hitRs=
|
||||
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
|
||||
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
|
||||
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
|
||||
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
|
||||
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
|
||||
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
|
||||
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
|
@@ -3,7 +3,7 @@
description = "Library for filtering LoRaWAN payloads on DevAddr and JoinEUIs prefixes"
homepage = "https://www.chirpstack.io/"
license = "MIT"
version = "4.13.0"
version = "4.14.0"
authors = ["Orne Brocaar <info@brocaar.com>"]
edition = "2021"
repository = "https://github.com/chirpstack/chirpstack"

@@ -3,7 +3,7 @@
description = "Library for encoding / decoding LoRaWAN frames."
homepage = "https://www.chirpstack.io"
license = "MIT"
version = "4.13.0"
version = "4.14.0"
authors = ["Orne Brocaar <info@brocaar.com>"]
edition = "2018"
repository = "https://github.com/chirpstack/chirpstack"
|
@@ -7,6 +7,7 @@ use aes::{
use anyhow::Result;

use crate::applayer::PayloadCodec;
use crate::helpers::{decode_freq, encode_freq};
use crate::{AES128Key, DevAddr};

pub enum Cid {

@@ -495,11 +496,7 @@ impl PayloadCodec for McClassCSessionReqPayload {
            session_time_out: McClassCSessionReqPayloadSessionTimeOut {
                time_out: b[5] & 0x0f,
            },
            dl_frequ: {
                let mut bytes = [0; 4];
                bytes[0..3].copy_from_slice(&b[6..9]);
                u32::from_le_bytes(bytes) * 100
            },
            dl_frequ: decode_freq(&b[6..9])?,
            dr: b[9],
        })
    }

@@ -513,15 +510,11 @@ impl PayloadCodec for McClassCSessionReqPayload {
            return Err(anyhow!("Max time_out value is 15"));
        }

        if self.dl_frequ % 100 != 0 {
            return Err(anyhow!("dl_frequ must be a multiple of 100"));
        }

        let mut b = Vec::with_capacity(10);
        b.push(self.mc_group_id_header.mc_group_id);
        b.extend_from_slice(&self.session_time.to_le_bytes());
        b.push(self.session_time_out.time_out);
        b.extend_from_slice(&(self.dl_frequ / 100).to_le_bytes()[0..3]);
        b.extend_from_slice(&encode_freq(self.dl_frequ)?);
        b.push(self.dr);

        Ok(b)

@@ -638,11 +631,7 @@ impl PayloadCodec for McClassBSessionReqPayload {
                time_out: b[5] & 0x0f,
                periodicity: (b[5] >> 4) & 0x07,
            },
            dl_frequ: {
                let mut bytes = [0; 4];
                bytes[0..3].copy_from_slice(&b[6..9]);
                u32::from_le_bytes(bytes) * 100
            },
            dl_frequ: decode_freq(&b[6..9])?,
            dr: b[9],
        })
    }

@@ -664,7 +653,7 @@ impl PayloadCodec for McClassBSessionReqPayload {
        b.push(self.mc_group_id_header.mc_group_id);
        b.extend_from_slice(&self.session_time.to_le_bytes());
        b.push((self.time_out_periodicity.periodicity << 4) | self.time_out_periodicity.time_out);
        b.extend_from_slice(&(self.dl_frequ / 100).to_le_bytes()[0..3]);
        b.extend_from_slice(&encode_freq(self.dl_frequ)?);
        b.push(self.dr);

        Ok(b)

@@ -7,6 +7,7 @@ use aes::{
use anyhow::Result;

use crate::applayer::PayloadCodec;
use crate::helpers::{decode_freq, encode_freq};
use crate::{AES128Key, DevAddr};

pub enum Cid {

@@ -495,11 +496,7 @@ impl PayloadCodec for McClassCSessionReqPayload {
            session_time_out: McClassCSessionReqPayloadSessionTimeOut {
                time_out: b[5] & 0x0f,
            },
            dl_frequ: {
                let mut bytes = [0; 4];
                bytes[0..3].copy_from_slice(&b[6..9]);
                u32::from_le_bytes(bytes) * 100
            },
            dl_frequ: decode_freq(&b[6..9])?,
            dr: b[9],
        })
    }

@@ -513,15 +510,11 @@ impl PayloadCodec for McClassCSessionReqPayload {
            return Err(anyhow!("Max time_out value is 15"));
        }

        if self.dl_frequ % 100 != 0 {
            return Err(anyhow!("dl_frequ must be a multiple of 100"));
        }

        let mut b = Vec::with_capacity(10);
        b.push(self.mc_group_id_header.mc_group_id);
        b.extend_from_slice(&self.session_time.to_le_bytes());
        b.push(self.session_time_out.time_out);
        b.extend_from_slice(&(self.dl_frequ / 100).to_le_bytes()[0..3]);
        b.extend_from_slice(&encode_freq(self.dl_frequ)?);
        b.push(self.dr);

        Ok(b)

@@ -643,11 +636,7 @@ impl PayloadCodec for McClassBSessionReqPayload {
                time_out: b[5] & 0x0f,
                periodicity: (b[5] >> 4) & 0x07,
            },
            dl_frequ: {
                let mut bytes = [0; 4];
                bytes[0..3].copy_from_slice(&b[6..9]);
                u32::from_le_bytes(bytes) * 100
            },
            dl_frequ: decode_freq(&b[6..9])?,
            dr: b[9],
        })
    }

@@ -669,7 +658,7 @@ impl PayloadCodec for McClassBSessionReqPayload {
        b.push(self.mc_group_id_header.mc_group_id);
        b.extend_from_slice(&self.session_time.to_le_bytes());
        b.push((self.time_out_periodicity.periodicity << 4) | self.time_out_periodicity.time_out);
        b.extend_from_slice(&(self.dl_frequ / 100).to_le_bytes()[0..3]);
        b.extend_from_slice(&encode_freq(self.dl_frequ)?);
        b.push(self.dr);

        Ok(b)
||||
|
@@ -23,6 +23,7 @@ pkgs.mkShell {
    pkgs.cargo-cross # cross-compiling
    pkgs.cargo-deb # deb packaging
    pkgs.diesel-cli # diesel cli
    pkgs.jq # json query cli tool
  ];
  LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib";
  BINDGEN_EXTRA_CLANG_ARGS = "-I${pkgs.llvmPackages.libclang.lib}/lib/clang/${pkgs.llvmPackages.libclang.version}/include";
|
@@ -1,6 +1,6 @@
{
  "name": "chirpstack-ui",
  "version": "4.13.0",
  "version": "4.14.0",
  "private": true,
  "type": "module",
  "scripts": {
|
BIN
ui/public/integrations/blynk.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 20 KiB |
Binary file not shown.
Before Width: | Height: | Size: 28 KiB |
@@ -47,11 +47,6 @@ import type {
|
||||
GetPilotThingsIntegrationResponse,
|
||||
UpdatePilotThingsIntegrationRequest,
|
||||
DeletePilotThingsIntegrationRequest,
|
||||
CreateLoraCloudIntegrationRequest,
|
||||
GetLoraCloudIntegrationRequest,
|
||||
GetLoraCloudIntegrationResponse,
|
||||
UpdateLoraCloudIntegrationRequest,
|
||||
DeleteLoraCloudIntegrationRequest,
|
||||
CreateThingsBoardIntegrationRequest,
|
||||
GetThingsBoardIntegrationRequest,
|
||||
GetThingsBoardIntegrationResponse,
|
||||
@@ -62,6 +57,11 @@ import type {
|
||||
GetIftttIntegrationResponse,
|
||||
UpdateIftttIntegrationRequest,
|
||||
DeleteIftttIntegrationRequest,
|
||||
CreateBlynkIntegrationRequest,
|
||||
GetBlynkIntegrationRequest,
|
||||
GetBlynkIntegrationResponse,
|
||||
UpdateBlynkIntegrationRequest,
|
||||
DeleteBlynkIntegrationRequest,
|
||||
GenerateMqttIntegrationClientCertificateRequest,
|
||||
GenerateMqttIntegrationClientCertificateResponse,
|
||||
ListApplicationDeviceProfilesRequest,
|
||||
@@ -72,6 +72,7 @@ import type {
|
||||
|
||||
import SessionStore from "./SessionStore";
|
||||
import { HandleError } from "./helpers";
|
||||
import { callback } from "chart.js/helpers";
|
||||
|
||||
class ApplicationStore extends EventEmitter {
|
||||
client: ApplicationServiceClient;
|
||||
@@ -602,69 +603,6 @@ class ApplicationStore extends EventEmitter {
|
||||
});
|
||||
};
|
||||
|
||||
createLoraCloudIntegration = (req: CreateLoraCloudIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.createLoraCloudIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "LoRa Cloud interation created",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
getLoraCloudIntegration = (
|
||||
req: GetLoraCloudIntegrationRequest,
|
||||
callbackFunc: (resp: GetLoraCloudIntegrationResponse) => void,
|
||||
) => {
|
||||
this.client.getLoraCloudIntegration(req, SessionStore.getMetadata(), (err, resp) => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
callbackFunc(resp);
|
||||
});
|
||||
};
|
||||
|
||||
updateLoraCloudIntegration = (req: UpdateLoraCloudIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.updateLoraCloudIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "LoRa Cloud integration updated",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
deleteLoraCloudIntegration = (req: DeleteLoraCloudIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.deleteLoraCloudIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "LoRa Cloud integration deleted",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
this.emit("integration.delete");
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
createThingsBoardIntegration = (req: CreateThingsBoardIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.createThingsBoardIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
@@ -791,6 +729,69 @@ class ApplicationStore extends EventEmitter {
|
||||
});
|
||||
};
|
||||
|
||||
createBlynkIntegration = (req: CreateBlynkIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.createBlynkIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "Blynk integration created",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
getBlynkIntegration = (
|
||||
req: GetBlynkIntegrationRequest,
|
||||
callbackFunc: (resp: GetBlynkIntegrationResponse) => void,
|
||||
) => {
|
||||
this.client.getBlynkIntegration(req, SessionStore.getMetadata(), (err, resp) => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
callbackFunc(resp);
|
||||
});
|
||||
};
|
||||
|
||||
updateBlynkIntegration = (req: UpdateBlynkIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.updateBlynkIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "Blynk integration updated",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
deleteBlynkIntegration = (req: DeleteBlynkIntegrationRequest, callbackFunc: () => void) => {
|
||||
this.client.deleteBlynkIntegration(req, SessionStore.getMetadata(), err => {
|
||||
if (err !== null) {
|
||||
HandleError(err);
|
||||
return;
|
||||
}
|
||||
|
||||
notification.success({
|
||||
message: "Blynk integration deleted",
|
||||
duration: 3,
|
||||
});
|
||||
|
||||
this.emit("integration.delete");
|
||||
callbackFunc();
|
||||
});
|
||||
};
|
||||
|
||||
generateMqttIntegrationClientCertificate = (
|
||||
req: GenerateMqttIntegrationClientCertificateRequest,
|
||||
callbackFunc: (resp: GenerateMqttIntegrationClientCertificateResponse) => void,
|
||||
|
@@ -32,13 +32,13 @@ import CreateMyDevicesIntegration from "./integrations/CreateMyDevicesIntegratio
import EditMyDevicesIntegration from "./integrations/EditMyDevicesIntegration";
import CreatePilotThingsIntegration from "./integrations/CreatePilotThingsIntegration";
import EditPilotThingsIntegration from "./integrations/EditPilotThingsIntegration";
import CreateLoRaCloudIntegration from "./integrations/CreateLoRaCloudIntegration";
import EditLoRaCloudIntegration from "./integrations/EditLoRaCloudIntegration";
import CreateThingsBoardIntegration from "./integrations/CreateThingsBoardIntegration";
import EditThingsBoardIntegration from "./integrations/EditThingsBoardIntegration";
import GenerateMqttCertificate from "./integrations/GenerateMqttCertificate";
import CreateIftttIntegration from "./integrations/CreateIftttIntegration";
import EditIftttIntegration from "./integrations/EditIftttIntegration";
import CreateBlynkIntegration from "./integrations/CreateBlynkIntegration";
import EditBlynkIntegration from "./integrations/EditBlynkIntegration";
import { useTitle } from "../helpers";

interface IProps {

@@ -184,8 +184,6 @@ function ApplicationLayout(props: IProps) {
              element={<CreatePilotThingsIntegration application={app} />}
            />
            <Route path="/integrations/pilot-things/edit" element={<EditPilotThingsIntegration application={app} />} />
            <Route path="/integrations/loracloud/create" element={<CreateLoRaCloudIntegration application={app} />} />
            <Route path="/integrations/loracloud/edit" element={<EditLoRaCloudIntegration application={app} />} />
            <Route path="/integrations/thingsboard/create" element={<CreateThingsBoardIntegration application={app} />} />
            <Route path="/integrations/thingsboard/edit" element={<EditThingsBoardIntegration application={app} />} />
            <Route path="/integrations/mqtt/certificate" element={<GenerateMqttCertificate application={app} />} />

@@ -197,6 +195,8 @@ function ApplicationLayout(props: IProps) {
              path="/integrations/ifttt/edit"
              element={<EditIftttIntegration application={app} measurementKeys={props.measurementKeys} />}
            />
            <Route path="/integrations/blynk/create" element={<CreateBlynkIntegration application={app} />} />
            <Route path="/integrations/blynk/edit" element={<EditBlynkIntegration application={app} />} />
          </Routes>
        </Card>
      </Space>
||||
|
@@ -18,9 +18,9 @@ import AzureServiceBusCard from "./integrations/AzureServiceBusCard";
import GcpPubSubCard from "./integrations/GcpPubSubCard";
import InfluxdbCard from "./integrations/InfluxdbCard";
import PilotThingsCard from "./integrations/PilotThingsCard";
import LoRaCloudCard from "./integrations/LoRaCloudCard";
import ThingsBoardCard from "./integrations/ThingsBoardCard";
import IftttCard from "./integrations/IftttCard";
import BlynkCard from "./integrations/BlynkCard";

interface IProps {
  application: Application;

@@ -63,6 +63,13 @@ function ListIntegrations(props: IProps) {
      available.push(<AzureServiceBusCard application={props.application} add />);
    }

    // Blynk
    if (includes(resp.getResultList(), IntegrationKind.BLYNK)) {
      configured.push(<BlynkCard application={props.application} />);
    } else {
      available.push(<BlynkCard application={props.application} add />);
    }

    // GCP Pub/Sub
    if (includes(resp.getResultList(), IntegrationKind.GCP_PUB_SUB)) {
      configured.push(<GcpPubSubCard application={props.application} />);

@@ -110,13 +117,6 @@ function ListIntegrations(props: IProps) {
      available.push(<PilotThingsCard application={props.application} add />);
    }

    // Semtech LoRa Cloud
    if (includes(resp.getResultList(), IntegrationKind.LORA_CLOUD)) {
      configured.push(<LoRaCloudCard application={props.application} />);
    } else {
      available.push(<LoRaCloudCard application={props.application} add />);
    }

    // ThingsBoard
    if (includes(resp.getResultList(), IntegrationKind.THINGS_BOARD)) {
      configured.push(<ThingsBoardCard application={props.application} />);
||||
|
@@ -4,7 +4,7 @@ import { Col, Card, Popconfirm } from "antd";
import { PlusOutlined, EditOutlined, DeleteOutlined } from "@ant-design/icons";

import type { Application } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
import { DeleteLoraCloudIntegrationRequest } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
import { DeleteBlynkIntegrationRequest } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";

import ApplicationStore from "../../../stores/ApplicationStore";

@@ -13,24 +13,24 @@ interface IProps {
  add?: boolean;
}

function LoRaCloudCard(props: IProps) {
function BlynkCard(props: IProps) {
  const onDelete = () => {
    const req = new DeleteLoraCloudIntegrationRequest();
    const req = new DeleteBlynkIntegrationRequest();
    req.setApplicationId(props.application.getId());
    ApplicationStore.deleteLoraCloudIntegration(req, () => {});
    ApplicationStore.deleteBlynkIntegration(req, () => {});
  };

  let actions: JSX.Element[] = [];

  if (props.add) {
    actions = [
      <Link to="loracloud/create">
      <Link to="blynk/create">
        <PlusOutlined />
      </Link>,
    ];
  } else {
    actions = [
      <Link to="loracloud/edit">
      <Link to="blynk/edit">
        <EditOutlined />
      </Link>,
      <Popconfirm title="Are you sure you want to delete this integration?" onConfirm={onDelete}>

@@ -42,15 +42,15 @@ function LoRaCloudCard(props: IProps) {
  return (
    <Col span={8}>
      <Card
        title="Semtech LoRa Cloud™"
        title="Blynk"
        className="integration-card"
        cover={<img alt="Semtech LoRa Cloud" src="/integrations/loracloud.png" style={{ padding: 1 }} />}
        cover={<img alt="Blynk" src="/integrations/blynk.png" style={{ padding: 1 }} />}
        actions={actions}
      >
        <Card.Meta description="The Semtech LoRa Cloud integration provides Modem & Geolocation Services." />
        <Card.Meta description="The Blynk integration forwards events to the Blynk platform." />
      </Card>
    </Col>
  );
}

export default LoRaCloudCard;
export default BlynkCard;
|
@@ -0,0 +1,46 @@
import { Form, Input, Button } from "antd";

import { BlynkIntegration } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";

import { onFinishFailed } from "../../helpers";

interface IProps {
  initialValues: BlynkIntegration;
  onFinish: (obj: BlynkIntegration) => void;
}

function BlynkIntegrationForm(props: IProps) {
  const onFinish = (values: BlynkIntegration.AsObject) => {
    const v = Object.assign(props.initialValues.toObject(), values);
    const i = new BlynkIntegration();

    i.setApplicationId(v.applicationId);
    i.setToken(v.token);

    props.onFinish(i);
  };

  return (
    <Form
      layout="vertical"
      initialValues={props.initialValues.toObject()}
      onFinish={onFinish}
      onFinishFailed={onFinishFailed}
    >
      <Form.Item
        label="Blynk API token"
        name="token"
        rules={[{ required: true, message: "Please enter a Blynk API token!" }]}
      >
        <Input.Password />
      </Form.Item>
      <Form.Item>
        <Button type="primary" htmlType="submit">
          Submit
        </Button>
      </Form.Item>
    </Form>
  );
}

export default BlynkIntegrationForm;
|
@@ -0,0 +1,41 @@
import { useNavigate } from "react-router-dom";

import { Card } from "antd";

import type { Application } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
import {
  BlynkIntegration,
  CreateBlynkIntegrationRequest,
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";

import BlynkIntegrationForm from "./BlynkIntegrationForm";
import ApplicationStore from "../../../stores/ApplicationStore";

interface IProps {
  application: Application;
}

function CreateBlynkIntegration(props: IProps) {
  const navigate = useNavigate();

  const onFinish = (obj: BlynkIntegration) => {
    obj.setApplicationId(props.application.getId());

    const req = new CreateBlynkIntegrationRequest();
    req.setIntegration(obj);

    ApplicationStore.createBlynkIntegration(req, () => {
      navigate(`/tenants/${props.application.getTenantId()}/applications/${props.application.getId()}/integrations`);
    });
  };

  const i = new BlynkIntegration();

  return (
    <Card title="Add Blynk integration">
      <BlynkIntegrationForm initialValues={i} onFinish={onFinish} />
    </Card>
  );
}

export default CreateBlynkIntegration;
|
@@ -1,47 +0,0 @@
|
||||
import { useNavigate } from "react-router-dom";
|
||||
|
||||
import { Card } from "antd";
|
||||
|
||||
import type { Application } from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
|
||||
import {
|
||||
LoraCloudIntegration,
|
||||
LoraCloudModemGeolocationServices,
|
||||
CreateLoraCloudIntegrationRequest,
|
||||
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
|
||||
|
||||
import LoRaCloudIntegrationForm from "./LoRaCloudIntegrationForm";
|
||||
import ApplicationStore from "../../../stores/ApplicationStore";
|
||||
|
||||
interface IProps {
|
||||
application: Application;
|
||||
}
|
||||
|
||||
function CreateLoRaCloudIntegration(props: IProps) {
|
||||
const navigate = useNavigate();
|
||||
|
||||
const onFinish = (obj: LoraCloudIntegration) => {
|
||||
obj.setApplicationId(props.application.getId());
|
||||
|
||||
const req = new CreateLoraCloudIntegrationRequest();
|
||||
req.setIntegration(obj);
|
||||
|
||||
ApplicationStore.createLoraCloudIntegration(req, () => {
|
||||
navigate(`/tenants/${props.application.getTenantId()}/applications/${props.application.getId()}/integrations`);
|
||||
});
|
||||
};
|
||||
|
||||
const i = new LoraCloudIntegration();
|
||||
const mgs = new LoraCloudModemGeolocationServices();
|
||||
mgs.setModemEnabled(true);
|
||||
mgs.setForwardFPortsList([192, 197, 198, 199]);
|
||||
|
||||
i.setModemGeolocationServices(mgs);
|
||||
|
||||
return (
|
||||
<Card title="Add Semtech LoRa Cloud™ integration">
|
||||
<LoRaCloudIntegrationForm initialValues={i} onFinish={onFinish} />
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
export default CreateLoRaCloudIntegration;
|
@@ -0,0 +1,56 @@
import { useState, useEffect } from "react";
import { useNavigate } from "react-router-dom";

import { Card } from "antd";

import type {
  Application,
  BlynkIntegration,
  GetBlynkIntegrationResponse,
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
import {
  GetBlynkIntegrationRequest,
  UpdateBlynkIntegrationRequest,
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";

import BlynkIntegrationForm from "./BlynkIntegrationForm";
import ApplicationStore from "../../../stores/ApplicationStore";

interface IProps {
  application: Application;
}

function EditBlynkIntegration(props: IProps) {
  const navigate = useNavigate();
  const [integration, setIntegration] = useState<BlynkIntegration | undefined>(undefined);

  useEffect(() => {
    const req = new GetBlynkIntegrationRequest();
    req.setApplicationId(props.application.getId());

    ApplicationStore.getBlynkIntegration(req, (resp: GetBlynkIntegrationResponse) => {
      setIntegration(resp.getIntegration());
    });
  }, [props]);

  const onFinish = (obj: BlynkIntegration) => {
    const req = new UpdateBlynkIntegrationRequest();
    req.setIntegration(obj);

    ApplicationStore.updateBlynkIntegration(req, () => {
      navigate(`/tenants/${props.application.getTenantId()}/applications/${props.application.getId()}/integrations`);
    });
  };

  if (integration === undefined) {
    return null;
  }

  return (
    <Card title="Update Blynk integration">
      <BlynkIntegrationForm initialValues={integration} onFinish={onFinish} />
    </Card>
  );
}

export default EditBlynkIntegration;
|
@@ -1,56 +0,0 @@
|
||||
import { useEffect, useState } from "react";
|
||||
import { useNavigate } from "react-router-dom";
|
||||
|
||||
import { Card } from "antd";
|
||||
|
||||
import type {
|
||||
Application,
|
||||
LoraCloudIntegration,
|
||||
GetLoraCloudIntegrationResponse,
|
||||
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
|
||||
import {
|
||||
GetLoraCloudIntegrationRequest,
|
||||
UpdateLoraCloudIntegrationRequest,
|
||||
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";
|
||||
|
||||
import LoRaCloudIntegrationForm from "./LoRaCloudIntegrationForm";
|
||||
import ApplicationStore from "../../../stores/ApplicationStore";
|
||||
|
||||
interface IProps {
|
||||
application: Application;
|
||||
}
|
||||
|
||||
function EditLoRaCloudIntegration(props: IProps) {
|
||||
const navigate = useNavigate();
|
||||
const [integration, setIntegration] = useState<LoraCloudIntegration | undefined>(undefined);
|
||||
|
||||
useEffect(() => {
|
||||
const req = new GetLoraCloudIntegrationRequest();
|
||||
req.setApplicationId(props.application.getId());
|
||||
|
||||
ApplicationStore.getLoraCloudIntegration(req, (resp: GetLoraCloudIntegrationResponse) => {
|
||||
setIntegration(resp.getIntegration());
|
||||
});
|
||||
}, [props]);
|
||||
|
||||
const onFinish = (obj: LoraCloudIntegration) => {
|
||||
const req = new UpdateLoraCloudIntegrationRequest();
|
||||
req.setIntegration(obj);
|
||||
|
||||
ApplicationStore.updateLoraCloudIntegration(req, () => {
|
||||
navigate(`/tenants/${props.application.getTenantId()}/applications/${props.application.getId()}/integrations`);
|
||||
});
|
||||
};
|
||||
|
||||
if (integration === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<Card title="Update Semtech LoRa Cloud™ integration">
|
||||
<LoRaCloudIntegrationForm initialValues={integration} onFinish={onFinish} />
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
|
||||
export default EditLoRaCloudIntegration;
|
@@ -1,261 +0,0 @@
import { useState, useEffect } from "react";

import { Form, Input, InputNumber, Switch, Button, Tabs, Collapse } from "antd";
import { MinusCircleOutlined, PlusOutlined } from "@ant-design/icons";

import {
  LoraCloudIntegration,
  LoraCloudModemGeolocationServices,
} from "@chirpstack/chirpstack-api-grpc-web/api/application_pb";

import { onFinishFailed } from "../../helpers";

interface IProps {
  initialValues: LoraCloudIntegration;
  onFinish: (obj: LoraCloudIntegration) => void;
}

function LoRaCloudIntegrationForm(props: IProps) {
  const [modemEnabled, setModemEnabled] = useState<boolean>(false);
  const [geolocationTdoa, setGeolocationTdoa] = useState<boolean>(false);
  const [geolocationRssi, setGeolocationRssi] = useState<boolean>(false);
  const [geolocationWifi, setGeolocationWifi] = useState<boolean>(false);
  const [geolocationGnss, setGeolocationGnss] = useState<boolean>(false);

  useEffect(() => {
    const v = props.initialValues;
    const mgs = v.getModemGeolocationServices();

    if (mgs !== undefined) {
      setModemEnabled(mgs.getModemEnabled());
      setGeolocationTdoa(mgs.getGeolocationTdoa());
      setGeolocationRssi(mgs.getGeolocationRssi());
      setGeolocationWifi(mgs.getGeolocationWifi());
      setGeolocationGnss(mgs.getGeolocationGnss());
    }
  }, [props]);

  const onFinish = (values: LoraCloudIntegration.AsObject) => {
    const v = Object.assign(props.initialValues.toObject(), values);
    const mgsv = v.modemGeolocationServices;

    const mgs = new LoraCloudModemGeolocationServices();

    if (mgsv !== undefined) {
      mgs.setToken(mgsv.token);
      mgs.setModemEnabled(mgsv.modemEnabled);
      mgs.setForwardFPortsList(mgsv.forwardFPortsList);
      mgs.setGnssUseRxTime(mgsv.gnssUseRxTime);
      mgs.setGnssUseGatewayLocation(mgsv.gnssUseGatewayLocation);
      mgs.setParseTlv(mgsv.parseTlv);
      mgs.setGeolocationBufferTtl(mgsv.geolocationBufferTtl);
      mgs.setGeolocationMinBufferSize(mgsv.geolocationMinBufferSize);
      mgs.setGeolocationTdoa(mgsv.geolocationTdoa);
      mgs.setGeolocationRssi(mgsv.geolocationRssi);
      mgs.setGeolocationGnss(mgsv.geolocationGnss);
      mgs.setGeolocationGnssPayloadField(mgsv.geolocationGnssPayloadField);
      mgs.setGeolocationWifi(mgsv.geolocationWifi);
      mgs.setGeolocationWifiPayloadField(mgsv.geolocationWifiPayloadField);
      mgs.setGeolocationGnssUseRxTime(mgsv.geolocationGnssUseRxTime);
    }

    const i = new LoraCloudIntegration();
    i.setApplicationId(v.applicationId);
    i.setModemGeolocationServices(mgs);

    props.onFinish(i);
  };

  const onModemEnabledChange = (v: boolean) => {
    setModemEnabled(v);
  };

  const onGeolocationTdoaChange = (v: boolean) => {
    setGeolocationTdoa(v);
  };

  const onGeolocationRssiChange = (v: boolean) => {
    setGeolocationRssi(v);
  };

  const onGeolocationWifiChange = (v: boolean) => {
    setGeolocationWifi(v);
  };

  const onGeolocationGnssChange = (v: boolean) => {
    setGeolocationGnss(v);
  };

  return (
    <Form
      layout="vertical"
      initialValues={props.initialValues.toObject()}
      onFinish={onFinish}
      onFinishFailed={onFinishFailed}
    >
      <Tabs>
        <Tabs.TabPane tab="Modem & Geolocation Services" key="1">
          <Form.Item
            label="Token"
            name={["modemGeolocationServices", "token"]}
            tooltip="This token can be obtained from loracloud.com"
            rules={[{ required: true, message: "Please enter a token!" }]}
          >
            <Input type="password" />
          </Form.Item>
          <Form.Item
            name={["modemGeolocationServices", "modemEnabled"]}
            label="I am using LoRa Edge™ LR1110 or my device uses LoRa Basics™ Modem-E"
            valuePropName="checked"
          >
            <Switch onChange={onModemEnabledChange} />
          </Form.Item>
          {modemEnabled && (
            <Form.List name={["modemGeolocationServices", "forwardFPortsList"]}>
              {(fields, { add, remove }) => (
                <Form.Item label="Forward messages on these FPorts to LoRa Cloud">
                  {fields.map((field, index) => (
                    <Form.Item
                      {...field}
                      rules={[{ required: true, message: "Please a FPort value!" }]}
                      style={{
                        display: "inline-block",
                        width: "100px",
                        marginRight: "24px",
                      }}
                    >
                      <InputNumber
                        min={1}
                        max={255}
                        addonAfter={<MinusCircleOutlined onClick={() => remove(index)} />}
                      />
                    </Form.Item>
                  ))}
                  <Button type="dashed" onClick={() => add()} icon={<PlusOutlined />} />
                </Form.Item>
              )}
            </Form.List>
          )}
          {modemEnabled && (
            <Form.Item
              label="Use receive timestamp for GNSS geolocation"
              name={["modemGeolocationServices", "gnssUseRxTime"]}
              tooltip="If enabled, the receive timestamp of the gateway will be used as reference instead of the timestamp included in the GNSS payload."
              valuePropName="checked"
            >
              <Switch />
            </Form.Item>
          )}
          {modemEnabled && (
            <Form.Item
              label="Use location of receiving gateways for assistance"
              name={["modemGeolocationServices", "gnssUseGatewayLocation"]}
              tooltip="If enabled, the gateway location will be provided to the geolocation resolver to aid the resolving process."
              valuePropName="checked"
            >
              <Switch />
            </Form.Item>
          )}
          {modemEnabled && (
            <Form.Item
              label="My device adheres to the LoRa Edge™ Tracker Modem-E Version Reference Design protocol"
              name={["modemGeolocationServices", "parseTlv"]}
              tooltip="If enabled, ChirpStack will try to resolve the location of the device if a geolocation payload is detected."
              valuePropName="checked"
            >
              <Switch />
            </Form.Item>
          )}
          <Collapse style={{ marginBottom: 24 }}>
            <Collapse.Panel header="Advanced geolocation options" key={1}>
              <Form.Item
                label="TDOA based geolocation"
                name={["modemGeolocationServices", "geolocationTdoa"]}
                tooltip="If enabled, geolocation will be based on time-difference of arrival (TDOA). Please note that this requires gateways that support the fine-timestamp feature."
                valuePropName="checked"
              >
                <Switch onChange={onGeolocationTdoaChange} />
              </Form.Item>
              <Form.Item
                label="RSSI based geolocation"
                name={["modemGeolocationServices", "geolocationRssi"]}
                tooltip="If enabled, geolocation will be based on RSSI values reported by the receiving gateways."
                valuePropName="checked"
              >
                <Switch onChange={onGeolocationRssiChange} />
              </Form.Item>
              <Form.Item
                label="Wi-Fi based geolocation"
                name={["modemGeolocationServices", "geolocationWifi"]}
                tooltip="If enabled, geolocation will be based on Wi-Fi access-point data reported by the device."
                valuePropName="checked"
              >
                <Switch onChange={onGeolocationWifiChange} />
              </Form.Item>
              <Form.Item
                label="GNSS based geolocation (LR1110)"
                name={["modemGeolocationServices", "geolocationGnss"]}
                tooltip="If enabled, geolocation will be based on GNSS data reported by the device."
                valuePropName="checked"
              >
                <Switch onChange={onGeolocationGnssChange} />
              </Form.Item>
              {(geolocationTdoa || geolocationRssi) && (
                <Form.Item
                  label="Geolocation buffer (TTL in seconds)"
                  name={["modemGeolocationServices", "geolocationBufferTtl"]}
                  tooltip="The time in seconds that historical uplinks will be stored in the geolocation buffer. Used for TDOA and RSSI geolocation."
                >
                  <InputNumber min={0} max={86400} />
                </Form.Item>
              )}
              {(geolocationTdoa || geolocationRssi) && (
                <Form.Item
                  label="Geolocation min buffer size"
                  name={["modemGeolocationServices", "geolocationMinBufferSize"]}
                  tooltip="The minimum buffer size required before using geolocation. Using multiple uplinks for geolocation can increase the accuracy of the geolocation results. Used for TDOA and RSSI geolocation."
                >
                  <InputNumber min={0} />
                </Form.Item>
              )}
              {geolocationWifi && (
                <Form.Item
                  label="Wifi payload field"
                  name={["modemGeolocationServices", "geolocationWifiPayloadField"]}
                  tooltip="This must match the name of the field in the decoded payload which holds array of Wifi access-points. Each element in the array must contain two keys: 1) macAddress: array of 6 bytes, 2) signalStrength: RSSI of the access-point."
                >
                  <Input />
                </Form.Item>
              )}
              {geolocationGnss && (
                <Form.Item
                  label="GNSS payload field"
                  name={["modemGeolocationServices", "geolocationGnssPayloadField"]}
                  tooltip="This must match the name of the field in the decoded payload which holds the LR1110 GNSS bytes."
                >
                  <Input />
                </Form.Item>
              )}
              {geolocationGnss && (
                <Form.Item
                  label="Use receive timestamp for GNSS geolocation"
                  name={["modemGeolocationServices", "geolocationGnssUseRxTime"]}
                  valuePropName="checked"
                >
                  <Switch />
                </Form.Item>
              )}
            </Collapse.Panel>
          </Collapse>
        </Tabs.TabPane>
      </Tabs>
      <Form.Item>
        <Button type="primary" htmlType="submit">
          Submit
        </Button>
      </Form.Item>
    </Form>
  );
}

export default LoRaCloudIntegrationForm;
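
The "Wifi payload field" and "GNSS payload field" options in the form above name fields of the decoded uplink payload. Below is a minimal sketch of a payload shape that satisfies the two tooltips; the field names wifiAccessPoints and gnssScan, and the byte-array representation of the GNSS scan, are illustrative assumptions and depend on the device codec and on what is configured in the form.

// Illustrative only: a decoded uplink payload compatible with the
// "Wifi payload field" and "GNSS payload field" settings described above.
// The field names wifiAccessPoints and gnssScan are assumptions, not defaults.
interface WifiAccessPoint {
  macAddress: number[]; // 6 bytes identifying the access point
  signalStrength: number; // RSSI of the access point in dBm
}

interface DecodedGeolocationPayload {
  wifiAccessPoints: WifiAccessPoint[]; // used for Wi-Fi based geolocation
  gnssScan: number[]; // LR1110 GNSS scan bytes (representation is an assumption)
}

const example: DecodedGeolocationPayload = {
  wifiAccessPoints: [
    { macAddress: [0x28, 0x11, 0xa5, 0x01, 0x02, 0x03], signalStrength: -67 },
    { macAddress: [0x3c, 0x22, 0xfb, 0x04, 0x05, 0x06], signalStrength: -72 },
  ],
  gnssScan: [0x01, 0x0a, 0x1f, 0x42],
};

console.log(JSON.stringify(example));
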
@@ -44,12 +44,16 @@ function CreateDeviceProfile(props: IProps) {
  * @param {number} input.fPort Uplink fPort.
  * @param {Record<string, string>} input.variables Object containing the configured device variables.
  *
- * @returns {{data: object}} Object representing the decoded payload.
+ * @returns {{data: object, errors: string[], warnings: string[]}}
+ *   An object containing:
+ *   - data: Object representing the decoded payload.
+ *   - errors: An array of errors (optional).
+ *   - warnings: An array of warnings (optional).
  */
 function decodeUplink(input) {
   return {
     data: {
-      // temp: 22.5
+      temp: 22.5,
     }
   };
 }
@@ -61,11 +65,17 @@ function decodeUplink(input) {
  * @param {object} input.data Object representing the payload that must be encoded.
  * @param {Record<string, string>} input.variables Object containing the configured device variables.
  *
- * @returns {{bytes: number[]}} Byte array containing the downlink payload.
+ * @returns {{bytes: number[], fPort: number, errors: string[], warnings: string[]}}
+ *   An object containing:
+ *   - bytes: Byte array containing the downlink payload.
+ *   - fPort: The downlink LoRaWAN fPort.
+ *   - errors: An array of errors (optional).
+ *   - warnings: An array of warnings (optional).
  */
 function encodeDownlink(input) {
   return {
-    // bytes: [225, 230, 255, 0]
+    fPort: 10,
+    bytes: [225, 230, 255, 0],
   };
 }
 `;
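
The updated codec template above documents that decodeUplink may return errors and warnings next to data, and that encodeDownlink returns fPort alongside bytes. Below is a minimal sketch of a codec using that contract; the 2-byte big-endian temperature layout with 0.1 °C scaling, the setPoint field, and FPort 10 are illustrative assumptions, and the TypeScript annotations are only for the example (ChirpStack codecs are plain JavaScript).

// Illustrative codec using the return shape documented in the updated template.
interface UplinkInput {
  bytes: number[];
  fPort: number;
  variables: Record<string, string>;
}

interface DownlinkInput {
  data: { setPoint: number };
  variables: Record<string, string>;
}

function decodeUplink(input: UplinkInput) {
  if (input.bytes.length < 2) {
    // Errors mark the uplink as failed to decode.
    return { data: {}, errors: ["payload too short: expected at least 2 bytes"] };
  }
  // Assumed layout: 2-byte big-endian temperature in 0.1 °C steps.
  const raw = (input.bytes[0] << 8) | input.bytes[1];
  const temp = raw / 10;
  const warnings: string[] = [];
  if (temp > 80) {
    // Warnings are reported alongside the decoded data without failing the decode.
    warnings.push("temperature above expected range");
  }
  return { data: { temp: temp }, warnings: warnings };
}

function encodeDownlink(input: DownlinkInput) {
  // Encode a set-point in 0.1 °C steps as two big-endian bytes; FPort 10 is illustrative.
  const raw = Math.round(input.data.setPoint * 10);
  return { fPort: 10, bytes: [(raw >> 8) & 0xff, raw & 0xff] };
}

console.log(decodeUplink({ bytes: [0x00, 0xe1], fPort: 10, variables: {} })); // { data: { temp: 22.5 }, warnings: [] }
console.log(encodeDownlink({ data: { setPoint: 22.5 }, variables: {} })); // { fPort: 10, bytes: [0, 225] }
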