
Commit

Merge remote-tracking branch 'origin/dev' into j2735/conform-to-2020-standard
dmccoystephenson committed Dec 26, 2023
2 parents 402ae59 + 760c581 commit 2947c50
Showing 39 changed files with 338 additions and 118 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -65,7 +65,7 @@ Once the ODE is deployed and running locally, you may access the ODE's demonstra
- [WYDOT Log Records](data/wydotLogRecords.h)
3. Press `Upload` button to upload the file to ODE.

Upload records within the files must be embedding BSM and/or TIM messages wrapped in J2735 MessageFrame and ASN.1 UPER encoded, wrapped in IEEE 1609.2 envelope and ASN.1 COER encoded binary format. Please review the files in the [data](data) folder for samples of each supported type. By uploading a valid data file, you will be able to observe the decoded messages contained within the file appear in the web UI page while connected to the WebSocket interface.
Records within uploaded files can embed BSM, MAP, and/or TIM messages wrapped in a J2735 MessageFrame, ASN.1 UPER encoded, and then wrapped in an IEEE 1609.2 envelope in ASN.1 COER encoded binary format. Log processing of files whose ASN.1 UPER encoded messages carry WSMP headers is supported, but the headers are removed before processing. Please review the files in the [data](data) folder for samples of each supported type. By uploading a valid data file, you will be able to observe the decoded messages contained within the file appear in the web UI page while connected to the WebSocket interface.

Another way data can be uploaded to the ODE is by copying the file to the location specified by the `ode.uploadLocationRoot/ode.uploadLocationObuLog` property. If not specified, the default location is the `uploads/bsmlog` sub-directory under the directory where the ODE is launched.
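For example, a minimal sketch of this file-drop approach, assuming the default `uploads/bsmlog` location and one of the sample files added in this commit:

```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class UploadByFileCopy {
    public static void main(String[] args) throws Exception {
        // Assumed default upload location: uploads/bsmlog under the ODE launch directory
        Path uploadDir = Paths.get("uploads", "bsmlog");
        Files.createDirectories(uploadDir);

        // Copy one of the sample log files from the data folder; the ODE's
        // directory watcher picks it up and begins log-file parsing
        Path sample = Paths.get("data", "bsmTx_commsignia.gz");
        Files.copy(sample, uploadDir.resolve(sample.getFileName()),
                StandardCopyOption.REPLACE_EXISTING);
    }
}
```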

4 changes: 4 additions & 0 deletions data/TIM_Message_Testing_Files/deprecated/README.md
@@ -0,0 +1,4 @@
# Deprecated TIM Message Testing Files
The files in this directory are deprecated and should not be used for testing. They are kept here for reference only.

These files are deprecated because they do not get processed correctly at this time.
Binary file added data/bsmLogDuringEvent_commsignia.gz
Binary file not shown.
Binary file added data/bsmTx_commsignia.gz
Binary file not shown.
Binary file added data/rxMsg_commsignia_map.gz
Binary file not shown.
Binary file added data/rxMsg_commsignia_tim.gz
Binary file not shown.
Binary file added data/rxMsg_map_and_tim.gz
Binary file not shown.
12 changes: 12 additions & 0 deletions docker-compose-confluent-cloud.yml
@@ -46,6 +46,9 @@ services:
CONFLUENT_KEY: ${CONFLUENT_KEY}
CONFLUENT_SECRET: ${CONFLUENT_SECRET}
ACM_CONFIG_FILE: adm.properties
ACM_LOG_TO_CONSOLE: ${ADM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${ADM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${ADM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}:/asn1_codec_share
logging:
@@ -62,6 +65,9 @@ services:
CONFLUENT_KEY: ${CONFLUENT_KEY}
CONFLUENT_SECRET: ${CONFLUENT_SECRET}
ACM_CONFIG_FILE: aem.properties
ACM_LOG_TO_CONSOLE: ${AEM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${AEM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${AEM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}:/asn1_codec_share
logging:
@@ -79,6 +85,9 @@ services:
CONFLUENT_KEY: ${CONFLUENT_KEY}
CONFLUENT_SECRET: ${CONFLUENT_SECRET}
PPM_CONFIG_FILE: cdot_ppmBsm.properties
PPM_LOG_TO_CONSOLE: ${PPM_BSM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_BSM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_BSM_LOG_LEVEL}

ppmtim:
build:
@@ -90,6 +99,9 @@ services:
CONFLUENT_KEY: ${CONFLUENT_KEY}
CONFLUENT_SECRET: ${CONFLUENT_SECRET}
PPM_CONFIG_FILE: cdot_ppmTim.properties
PPM_LOG_TO_CONSOLE: ${PPM_TIM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_TIM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_TIM_LOG_LEVEL}

cvpep_bsm_depositor:
build: ./jpo-s3-deposit
12 changes: 12 additions & 0 deletions docker-compose-ppm-nsv.yml
@@ -68,6 +68,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
ACM_CONFIG_FILE: adm.properties
ACM_LOG_TO_CONSOLE: ${ADM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${ADM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${ADM_LOG_LEVEL}
depends_on:
- kafka
volumes:
@@ -79,6 +82,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
ACM_CONFIG_FILE: aem.properties
ACM_LOG_TO_CONSOLE: ${AEM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${AEM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${AEM_LOG_LEVEL}
depends_on:
- kafka
volumes:
@@ -96,6 +102,9 @@ services:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
PPM_CONFIG_FILE: ${PPM_BSM_CONFIG_FILE}
PPM_MAP_FILE: ${PPM_MAP_FILE}
PPM_LOG_TO_CONSOLE: ${PPM_BSM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_BSM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_BSM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}/ppm_bsm:/ppm_data
depends_on:
@@ -113,6 +122,9 @@ services:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
PPM_CONFIG_FILE: ${PPM_TIM_CONFIG_FILE}
PPM_MAP_FILE: ${PPM_MAP_FILE}
PPM_LOG_TO_CONSOLE: ${PPM_TIM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_TIM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_TIM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}/ppm_tim:/ppm_data
depends_on:
12 changes: 12 additions & 0 deletions docker-compose.yml
@@ -82,6 +82,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
ACM_CONFIG_FILE: adm.properties
ACM_LOG_TO_CONSOLE: ${ADM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${ADM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${ADM_LOG_LEVEL}
depends_on:
- kafka
volumes:
@@ -97,6 +100,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
ACM_CONFIG_FILE: aem.properties
ACM_LOG_TO_CONSOLE: ${AEM_LOG_TO_CONSOLE}
ACM_LOG_TO_FILE: ${AEM_LOG_TO_FILE}
ACM_LOG_LEVEL: ${AEM_LOG_LEVEL}
depends_on:
- kafka
volumes:
@@ -114,6 +120,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
PPM_CONFIG_FILE: ppmBsm.properties
PPM_LOG_TO_CONSOLE: ${PPM_BSM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_BSM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_BSM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}:/ppm_data
depends_on:
@@ -131,6 +140,9 @@ services:
environment:
DOCKER_HOST_IP: ${DOCKER_HOST_IP}
PPM_CONFIG_FILE: ppmTim.properties
PPM_LOG_TO_CONSOLE: ${PPM_TIM_LOG_TO_CONSOLE}
PPM_LOG_TO_FILE: ${PPM_TIM_LOG_TO_FILE}
PPM_LOG_LEVEL: ${PPM_TIM_LOG_LEVEL}
volumes:
- ${DOCKER_SHARED_VOLUME}:/ppm_data
depends_on:
@@ -74,6 +74,6 @@ public void testGettersAndSetters() {
@Test
public void testSnmpTimestampFromIso() throws ParseException {
String snmpTS = SNMP.snmpTimestampFromIso("2017-05-04T21:55:00-05:00");
assertEquals("07E105041537", snmpTS);
assertEquals("07E1050415370000", snmpTS);
}
}
15 changes: 15 additions & 0 deletions jpo-ode-plugins/src/main/java/us/dot/its/jpo/ode/plugin/SNMP.java
@@ -127,6 +127,21 @@ public void setStatus(int status) {
public static String snmpTimestampFromIso(String isoTimestamp) throws ParseException {
ZonedDateTime zdt = DateTimeUtils.isoDateTime(isoTimestamp);


StringBuilder sb = new StringBuilder();

sb.append(String.format("%04X", zdt.getYear()));
sb.append(String.format("%02X", zdt.getMonthValue()));
sb.append(String.format("%02X", zdt.getDayOfMonth()));
sb.append(String.format("%02X", zdt.getHour()));
sb.append(String.format("%02X", zdt.getMinute()));
sb.append(String.format("%02X", zdt.getSecond()));
sb.append(String.format("%02X", zdt.getNano()));
return sb.toString();
}

public static String fourOneSnmpTimestampFromIso(String isoTimestamp) throws ParseException {
ZonedDateTime zdt = DateTimeUtils.isoDateTime(isoTimestamp);
StringBuilder sb = new StringBuilder();

sb.append(String.format("%04X", zdt.getYear()));
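A quick worked example of the new `snmpTimestampFromIso` encoding above, using the same input as the updated unit test. Note one assumption made explicit here: `%02X` on `getNano()` only yields two hex digits when the nanosecond field is zero, which holds for this test input.

```java
import java.time.ZonedDateTime;

public class SnmpTimestampDemo {
    public static void main(String[] args) {
        ZonedDateTime zdt = ZonedDateTime.parse("2017-05-04T21:55:00-05:00");
        StringBuilder sb = new StringBuilder();
        sb.append(String.format("%04X", zdt.getYear()));       // 2017 -> 07E1
        sb.append(String.format("%02X", zdt.getMonthValue())); //    5 -> 05
        sb.append(String.format("%02X", zdt.getDayOfMonth())); //    4 -> 04
        sb.append(String.format("%02X", zdt.getHour()));       //   21 -> 15
        sb.append(String.format("%02X", zdt.getMinute()));     //   55 -> 37
        sb.append(String.format("%02X", zdt.getSecond()));     //    0 -> 00
        sb.append(String.format("%02X", zdt.getNano()));       //    0 -> 00
        // Prints 07E1050415370000 -- matching the updated SNMPTest expectation
        System.out.println(sb);
    }
}
```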
2 changes: 1 addition & 1 deletion jpo-ode-svcs/pom.xml
@@ -115,7 +115,7 @@
<dependency>
<groupId>org.snmp4j</groupId>
<artifactId>snmp4j</artifactId>
<version>3.4.4</version>
<version>3.7.7</version>
</dependency>

</dependencies>
@@ -27,6 +27,12 @@

import org.snmp4j.security.AuthMD5;
import org.snmp4j.security.AuthSHA;
import org.snmp4j.security.AuthHMAC128SHA224;
import org.snmp4j.security.AuthHMAC192SHA256;
import org.snmp4j.security.AuthHMAC256SHA384;
import org.snmp4j.security.AuthHMAC384SHA512;
import org.snmp4j.security.PrivAES128;

import org.snmp4j.security.SecurityProtocols;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
@@ -50,7 +56,13 @@ public static void main(String[] args) throws MalformedObjectNameException, Inte
mbs.registerMBean(mBean, name);

SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthSHA());
SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthHMAC128SHA224());
SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthHMAC192SHA256());
SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthHMAC256SHA384());
SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthHMAC384SHA512());
SecurityProtocols.getInstance().addAuthenticationProtocol(new AuthMD5());
SecurityProtocols.getInstance().addPrivacyProtocol(new PrivAES128());

}

@Bean
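For context, a hedged sketch of how the protocols registered in the `main` method above are typically exercised with snmp4j. The user name and passphrases here are hypothetical placeholders, not values from this commit:

```java
import org.snmp4j.mp.MPv3;
import org.snmp4j.security.AuthHMAC192SHA256;
import org.snmp4j.security.PrivAES128;
import org.snmp4j.security.SecurityModels;
import org.snmp4j.security.SecurityProtocols;
import org.snmp4j.security.USM;
import org.snmp4j.security.UsmUser;
import org.snmp4j.smi.OctetString;

public class SnmpV3UserSketch {
    public static void main(String[] args) {
        // Protocols must already be registered (as in the main method above)
        // before a USM user referencing them can authenticate
        USM usm = new USM(SecurityProtocols.getInstance(),
                new OctetString(MPv3.createLocalEngineID()), 0);
        SecurityModels.getInstance().addSecurityModel(usm);

        // Hypothetical credentials: HMAC-192-SHA-256 auth with AES-128 privacy,
        // two of the protocols newly registered in this commit
        usm.addUser(new UsmUser(new OctetString("odeUser"),
                AuthHMAC192SHA256.ID, new OctetString("auth-passphrase"),
                PrivAES128.ID, new OctetString("priv-passphrase")));
    }
}
```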
@@ -16,9 +16,12 @@
package us.dot.its.jpo.ode.coder.stream;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -47,6 +50,7 @@
import us.dot.its.jpo.ode.model.SerialId;
import us.dot.its.jpo.ode.util.JsonUtils;
import us.dot.its.jpo.ode.util.XmlUtils;
import us.dot.its.jpo.ode.util.JsonUtils.JsonUtilsException;

public class LogFileToAsn1CodecPublisher implements Asn1CodecPublisher {

@@ -61,6 +65,7 @@ public LogFileToAsn1CodecPublisherException(String string, Exception e) {
}

protected static final Logger logger = LoggerFactory.getLogger(LogFileToAsn1CodecPublisher.class);
protected static HashMap<String, String> msgStartFlags = new HashMap<String, String>();

protected StringPublisher publisher;
protected LogFileParser fileParser;
@@ -69,6 +74,9 @@ public LogFileToAsn1CodecPublisherException(String string, Exception e) {
public LogFileToAsn1CodecPublisher(StringPublisher dataPub) {
this.publisher = dataPub;
this.serialId = new SerialId();
msgStartFlags.put("BSM", "0014");
msgStartFlags.put("TIM", "001f");
msgStartFlags.put("MAP", "0012");
}

public List<OdeData> publish(BufferedInputStream bis, String fileName, ImporterFileType fileType)
Expand All @@ -77,7 +85,7 @@ public List<OdeData> publish(BufferedInputStream bis, String fileName, ImporterF
ParserStatus status;

List<OdeData> dataList = new ArrayList<>();
if (fileType == ImporterFileType.LEAR_LOG_FILE) {
if (fileType == ImporterFileType.LOG_FILE) {
fileParser = LogFileParser.factory(fileName);

do {
@@ -92,6 +100,7 @@ public List<OdeData> publish(BufferedInputStream bis, String fileName, ImporterF
} else {
logger.error("Failed to decode ASN.1 data");
}
bis = removeNextNewLineCharacter(bis);
} catch (Exception e) {
throw new LogFileToAsn1CodecPublisherException("Error parsing or publishing data.", e);
}
@@ -146,38 +155,34 @@ private void publishList(XmlUtils xmlUtils, List<OdeData> dataList) throws JsonP
serialId.setBundleSize(dataList.size());
for (OdeData odeData : dataList) {
OdeLogMetadata msgMetadata = (OdeLogMetadata) odeData.getMetadata();
OdeMsgPayload msgPayload = (OdeMsgPayload) odeData.getPayload();
msgMetadata.setSerialId(serialId);

if (isDriverAlertRecord()) {
logger.debug("Publishing a driverAlert.");

publisher.publish(JsonUtils.toJson(odeData, false),
publisher.getOdeProperties().getKafkaTopicDriverAlertJson());
}
else
{
} else {
if (isBsmRecord()) {
logger.debug("Publishing a BSM");
} else if(isSpatRecord()) {
} else if (isSpatRecord()) {
logger.debug("Publishing a Spat");
}else {
logger.debug("Publishing a TIM");
} else {
logger.debug("Publishing a TIM or MAP");
}

if(isSpatRecord() && msgMetadata instanceof OdeSpatMetadata
&& !((OdeSpatMetadata)msgMetadata).getIsCertPresent() )
{
//Nothing: If Spat log file and IEEE1609Cert is not present, Skip the Ieee1609Dot2Data encoding
}
else
{
Asn1Encoding msgEncoding = new Asn1Encoding("root", "Ieee1609Dot2Data", EncodingRule.COER);
msgMetadata.addEncoding(msgEncoding);

if (!(isSpatRecord() && msgMetadata instanceof OdeSpatMetadata
&& !((OdeSpatMetadata) msgMetadata).getIsCertPresent())) {
if ("Ieee1609Dot2Data".equals(checkHeader(msgPayload))) {
Asn1Encoding msgEncoding = new Asn1Encoding("root", "Ieee1609Dot2Data", EncodingRule.COER);
msgMetadata.addEncoding(msgEncoding);
}
}

Asn1Encoding unsecuredDataEncoding = new Asn1Encoding("unsecuredData", "MessageFrame",EncodingRule.UPER);

Asn1Encoding unsecuredDataEncoding = new Asn1Encoding("unsecuredData", "MessageFrame",
EncodingRule.UPER);
msgMetadata.addEncoding(unsecuredDataEncoding);


publisher.publish(xmlUtils.toXml(odeData),
publisher.getOdeProperties().getKafkaTopicAsn1DecoderInput());
@@ -186,4 +191,43 @@ private void publishList(XmlUtils xmlUtils, List<OdeData> dataList) throws JsonP
}
}

public String checkHeader(OdeMsgPayload payload) {
JSONObject payloadJson;
String header = null;
try {
payloadJson = JsonUtils.toJSONObject(payload.getData().toJson());
String hexPacket = payloadJson.getString("bytes");

for (String key : msgStartFlags.keySet()) {
String startFlag = msgStartFlags.get(key);
int startIndex = hexPacket.toLowerCase().indexOf(startFlag);
logger.debug("Start index for " + key + "(" + startFlag + ")" + " is: " + startIndex);
if (startIndex <= 20 && startIndex != 0 && startIndex != -1) {
logger.debug("Message has supported Ieee1609Dot2Data header, adding encoding rule to Asn1DecoderInput XML");
header = "Ieee1609Dot2Data";
break;
}
logger.debug("Payload JSON: " + payloadJson);
}
} catch (JsonUtilsException e) {
logger.error("JsonUtilsException while checking message header. Stacktrace: " + e.toString());

}
return header;
}
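A self-contained sketch of the start-flag scan in `checkHeader` above. The hex payload is hypothetical, but the flags are the ones registered in the constructor (`0014` BSM, `001f` TIM, `0012` MAP):

```java
import java.util.HashMap;
import java.util.Map;

public class StartFlagScanDemo {
    public static void main(String[] args) {
        Map<String, String> msgStartFlags = new HashMap<>();
        msgStartFlags.put("BSM", "0014");
        msgStartFlags.put("TIM", "001f");
        msgStartFlags.put("MAP", "0012");

        // Hypothetical packet: ten bytes of IEEE 1609.2 header precede the
        // J2735 MAP start flag "0012" (hex offset 20, i.e. byte offset 10)
        String hexPacket = "03810040038000400001" + "0012" + "81a4c3d2";

        for (Map.Entry<String, String> entry : msgStartFlags.entrySet()) {
            int startIndex = hexPacket.toLowerCase().indexOf(entry.getValue());
            // Same test as checkHeader: a flag near, but not at, the start of
            // the payload implies a leading Ieee1609Dot2Data header
            if (startIndex != -1 && startIndex != 0 && startIndex <= 20) {
                System.out.println(entry.getKey() + " flag at hex offset "
                        + startIndex + " -> add COER Ieee1609Dot2Data encoding");
            }
        }
    }
}
```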

// Checks whether the next byte in the stream is a newline character (0x0A, decimal 10).
// If it is, the byte is consumed; otherwise it is pushed back into the buffered input stream.
public BufferedInputStream removeNextNewLineCharacter(BufferedInputStream bis) {
try {
bis.mark(1);
int nextByte = bis.read();
if (nextByte != 10) { // If the next byte is not a newline
bis.reset(); // Reset the stream back to the most recent mark
}
} catch (IOException e) {
e.printStackTrace();
}
return bis;
}
}
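And a minimal illustration of the mark/reset peek in `removeNextNewLineCharacter`, using a hypothetical three-byte record stream:

```java
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;

public class NewlinePeekDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical stream: a record byte, a newline separator, another record byte
        byte[] bytes = {0x01, 0x0A, 0x02};
        BufferedInputStream bis = new BufferedInputStream(new ByteArrayInputStream(bytes));

        System.out.println(bis.read()); // 1 -- first record byte

        // Peek at the next byte; consume it only if it is a newline (0x0A)
        bis.mark(1);
        if (bis.read() != 10) {
            bis.reset(); // not a newline: push the byte back
        }

        System.out.println(bis.read()); // 2 -- the newline separator was skipped
    }
}
```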
@@ -29,7 +29,7 @@
public class ImporterDirectoryWatcher implements Runnable {

public enum ImporterFileType {
LEAR_LOG_FILE, JSON_FILE
LOG_FILE, JSON_FILE
}

private static final Logger logger = LoggerFactory.getLogger(ImporterDirectoryWatcher.class);
