Skip to content

Commit

Permalink
init
Browse files Browse the repository at this point in the history
  • Loading branch information
ssadaadadad committed Dec 31, 2019
0 parents commit fdd5849
Show file tree
Hide file tree
Showing 16 changed files with 60,040 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
.idea
/target
*.iml
16 changes: 16 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# EDF parser for Kotlin
A small and simple library, written in Kotlin, for working with EDF files

### Examples
You can pass a file
```kotlin
val file = File("/example.edf")
val edfFile = EdfParser.parse(file)
```

Or a stream
```kotlin
class EdfParserExample
val stream: InputStream = EdfParserExample::class.java.getResourceAsStream("/example.edf")
val edfFile = EdfParser.parse(stream)
```
59 changes: 59 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.npwork</groupId>
    <artifactId>kotlin-edf-parser</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <kotlin.version>1.3.61</kotlin.version>
        <junit.version>5.5.2</junit.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.jetbrains.kotlin</groupId>
            <artifactId>kotlin-stdlib-jdk8</artifactId>
            <version>${kotlin.version}</version>
        </dependency>
        <dependency>
            <groupId>org.junit.jupiter</groupId>
            <artifactId>junit-jupiter</artifactId>
            <version>${junit.version}</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <sourceDirectory>src/main/kotlin</sourceDirectory>
        <!-- FIX: sourceDirectory is overridden, so the test sources must be
             pointed at src/test/kotlin explicitly or the Kotlin tests are
             never compiled by the test-compile execution. -->
        <testSourceDirectory>src/test/kotlin</testSourceDirectory>
        <plugins>
            <plugin>
                <groupId>org.jetbrains.kotlin</groupId>
                <artifactId>kotlin-maven-plugin</artifactId>
                <version>${kotlin.version}</version>
                <executions>
                    <execution>
                        <id>compile</id>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>test-compile</id>
                        <phase>test-compile</phase>
                        <goals>
                            <goal>test-compile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <jvmTarget>1.8</jvmTarget>
                </configuration>
            </plugin>
            <!-- FIX: JUnit 5 (Jupiter) tests are only executed by
                 maven-surefire-plugin 2.22.0 or newer; with the old default
                 surefire the test phase silently runs zero tests. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.22.2</version>
            </plugin>
        </plugins>
    </build>

</project>
39 changes: 39 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/EDFConstants.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package com.npwork.edfparser

import java.nio.charset.Charset

/**
 * Byte widths of the fixed-size ASCII fields of an EDF header, as consumed
 * sequentially by [EdfParser.parseHeader].
 *
 * NOTE(review): "REOCRDING" and "CHANELS" are misspelled, but the names are
 * public API used elsewhere in this library — renaming would be a breaking
 * change, so the typos are kept.
 */
object EDFConstants {
    // EDF header fields are plain ASCII text.
    val CHARSET = Charset.forName("ASCII")

    // --- Per-recording header fields (read once per file) ---
    const val IDENTIFICATION_CODE_SIZE = 8
    const val LOCAL_SUBJECT_IDENTIFICATION_SIZE = 80
    const val LOCAL_REOCRDING_IDENTIFICATION_SIZE = 80
    const val START_DATE_SIZE = 8
    const val START_TIME_SIZE = 8
    const val HEADER_SIZE = 8                 // "number of bytes in header" field
    const val DATA_FORMAT_VERSION_SIZE = 44
    const val DURATION_DATA_RECORDS_SIZE = 8
    const val NUMBER_OF_DATA_RECORDS_SIZE = 8
    const val NUMBER_OF_CHANELS_SIZE = 4

    // --- Per-channel header fields (read once per channel) ---
    const val LABEL_OF_CHANNEL_SIZE = 16
    const val TRANSDUCER_TYPE_SIZE = 80
    const val PHYSICAL_DIMENSION_OF_CHANNEL_SIZE = 8
    const val PHYSICAL_MIN_IN_UNITS_SIZE = 8
    const val PHYSICAL_MAX_IN_UNITS_SIZE = 8
    const val DIGITAL_MIN_SIZE = 8
    const val DIGITAL_MAX_SIZE = 8
    const val PREFILTERING_SIZE = 80
    const val NUMBER_OF_SAMPLES_SIZE = 8
    const val RESERVED_SIZE = 32

    /** The size of the EDF-Header-Record containing information about the recording */
    const val HEADER_SIZE_RECORDING_INFO = (IDENTIFICATION_CODE_SIZE + LOCAL_SUBJECT_IDENTIFICATION_SIZE + LOCAL_REOCRDING_IDENTIFICATION_SIZE
            + START_DATE_SIZE + START_TIME_SIZE + HEADER_SIZE + DATA_FORMAT_VERSION_SIZE + DURATION_DATA_RECORDS_SIZE
            + NUMBER_OF_DATA_RECORDS_SIZE + NUMBER_OF_CHANELS_SIZE)

    /** The size per channel of the EDF-Header-Record containing information a channel of the recording */
    const val HEADER_SIZE_PER_CHANNEL = (LABEL_OF_CHANNEL_SIZE + TRANSDUCER_TYPE_SIZE + PHYSICAL_DIMENSION_OF_CHANNEL_SIZE
            + PHYSICAL_MIN_IN_UNITS_SIZE + PHYSICAL_MAX_IN_UNITS_SIZE + DIGITAL_MIN_SIZE + DIGITAL_MAX_SIZE
            + PREFILTERING_SIZE + NUMBER_OF_SAMPLES_SIZE + RESERVED_SIZE)
}
8 changes: 8 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/EdfFormatException.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
package com.npwork.edfparser

/**
 * Closed hierarchy of parsing failures thrown by [EdfParser].
 *
 * Each subclass carries a human-readable default message; callers can match
 * exhaustively on the subtype to distinguish the failure stage.
 */
sealed class EdfFormatException(message: String? = null) : RuntimeException(message) {
    /** The input stream contained no data at all. */
    class EmptyFile(message: String? = "File is empty") : EdfFormatException(message)
    /** The identification code was not the plain-EDF value "0". */
    class WrongFormat(message: String? = "Wrong format of EDF file. Please check https://www.teuniz.net/edfbrowser/edf%20format%20description.html") : EdfFormatException(message)
    /** Any failure while decoding the fixed-size ASCII header fields. */
    class WrongHeader(message: String? = "Error during header parsing") : EdfFormatException(message)
    /** Any failure while decoding the 16-bit sample records. */
    class WrongSignal(message: String? = "Error during signal parsing") : EdfFormatException(message)
}
115 changes: 115 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/EdfParser.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
package com.npwork.edfparser

import com.npwork.edfparser.dto.EdfFile
import com.npwork.edfparser.dto.EdfHeader
import com.npwork.edfparser.dto.EdfSignal
import com.npwork.edfparser.extensions.*
import java.io.File
import java.io.InputStream
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel

object EdfParser {
    /** Parses an EDF file by delegating to the stream overload. */
    fun parse(file: File): EdfFile = parse(file.inputStream())

    /**
     * Parses an EDF recording: fixed-size ASCII header first, then the
     * little-endian 16-bit sample records.
     *
     * @throws EdfFormatException.EmptyFile   if the stream reports no data
     * @throws EdfFormatException.WrongHeader if header parsing fails for any
     *         reason — note this also re-reports the WrongFormat raised for a
     *         bad id code as WrongHeader, a behavior the test suite relies on
     * @throws EdfFormatException.WrongSignal if sample parsing fails
     */
    fun parse(stream: InputStream): EdfFile {
        // NOTE(review): available() == 0 is reliable for FileInputStream but
        // only means "nothing buffered yet" for some other stream types.
        if (stream.available() == 0)
            throw EdfFormatException.EmptyFile()

        val header = try {
            parseHeader(stream)
        } catch (e: Exception) {
            // The original cause is dropped; the exception type has no cause slot.
            throw EdfFormatException.WrongHeader()
        }

        val signal = try {
            parseSignal(stream, header)
        } catch (e: Exception) {
            throw EdfFormatException.WrongSignal()
        }

        return EdfFile(header = header, signal = signal)
    }

    /**
     * Reads the header fields in their exact on-disk order.
     *
     * Fields are read into named locals first (instead of inline inside the
     * constructor call, as before) so the sequential read order is explicit
     * and the channel count is available for the per-channel bulk reads —
     * this replaces the previous anonymous-function-and-captured-var idiom.
     */
    private fun parseHeader(stream: InputStream): EdfHeader {
        val idCode = stream.readASCII(EDFConstants.IDENTIFICATION_CODE_SIZE)
        ensureValidIdentificationCode(idCode)

        val subjectID = stream.readASCII(EDFConstants.LOCAL_SUBJECT_IDENTIFICATION_SIZE)
        val recordingID = stream.readASCII(EDFConstants.LOCAL_REOCRDING_IDENTIFICATION_SIZE)
        val startDate = stream.readASCII(EDFConstants.START_DATE_SIZE)
        val startTime = stream.readASCII(EDFConstants.START_TIME_SIZE)
        val bytesInHeader = stream.readASCII(EDFConstants.HEADER_SIZE).trim().toInt()
        val formatVersion = stream.readASCII(EDFConstants.DATA_FORMAT_VERSION_SIZE)
        val numberOfRecords = stream.readASCII(EDFConstants.NUMBER_OF_DATA_RECORDS_SIZE).trim().toInt()
        val durationOfRecords = stream.readASCII(EDFConstants.DURATION_DATA_RECORDS_SIZE).trim().toDouble()
        val numberOfChannels = stream.readASCII(EDFConstants.NUMBER_OF_CHANELS_SIZE).trim().toInt()

        return EdfHeader(
                idCode = idCode,
                subjectID = subjectID,
                recordingID = recordingID,
                startDate = startDate,
                startTime = startTime,
                bytesInHeader = bytesInHeader,
                formatVersion = formatVersion,
                numberOfRecords = numberOfRecords,
                durationOfRecords = durationOfRecords,
                numberOfChannels = numberOfChannels,
                // Per-channel fields are stored field-major: all labels, then
                // all transducer types, and so on.
                channelLabels = stream.readASCIIBulk(EDFConstants.LABEL_OF_CHANNEL_SIZE, numberOfChannels),
                transducerTypes = stream.readASCIIBulk(EDFConstants.TRANSDUCER_TYPE_SIZE, numberOfChannels),
                dimensions = stream.readASCIIBulk(EDFConstants.PHYSICAL_DIMENSION_OF_CHANNEL_SIZE, numberOfChannels),
                minInUnits = stream.readASCIIBulkDouble(EDFConstants.PHYSICAL_MIN_IN_UNITS_SIZE, numberOfChannels),
                maxInUnits = stream.readASCIIBulkDouble(EDFConstants.PHYSICAL_MAX_IN_UNITS_SIZE, numberOfChannels),
                digitalMin = stream.readASCIIBulkInt(EDFConstants.DIGITAL_MIN_SIZE, numberOfChannels),
                digitalMax = stream.readASCIIBulkInt(EDFConstants.DIGITAL_MAX_SIZE, numberOfChannels),
                prefilterings = stream.readASCIIBulk(EDFConstants.PREFILTERING_SIZE, numberOfChannels),
                numberOfSamples = stream.readASCIIBulkInt(EDFConstants.NUMBER_OF_SAMPLES_SIZE, numberOfChannels),
                reserveds = (1..numberOfChannels).map { stream.readNBytes(EDFConstants.RESERVED_SIZE) }
        )
    }

    /** Plain EDF files carry the identification code "0". */
    private fun ensureValidIdentificationCode(idCode: String) {
        if (idCode.trim() != "0") {
            throw EdfFormatException.WrongFormat()
        }
    }

    /**
     * Decodes the data records: each record holds [EdfHeader.numberOfSamples]
     * little-endian 16-bit values per channel, scaled into physical units by
     * the per-channel units-per-digit factor.
     *
     * NOTE(review): the EDF spec allows numberOfRecords == -1 for "unknown";
     * the loop then reads nothing — TODO confirm that is acceptable upstream.
     */
    private fun parseSignal(stream: InputStream, header: EdfHeader): EdfSignal {
        val signal = EdfSignal(
                unitsInDigit = (0 until header.numberOfChannels)
                        .map {
                            (header.maxInUnits[it] - header.minInUnits[it]) /
                                    (header.digitalMax[it] - header.digitalMin[it])
                        }
                        .toTypedArray(),

                digitalValues = (0 until header.numberOfChannels)
                        .map { ShortArray(header.numberOfRecords * header.numberOfSamples[it]) }
                        .toTypedArray(),

                valuesInUnits = (0 until header.numberOfChannels)
                        .map { DoubleArray(header.numberOfRecords * header.numberOfSamples[it]) }
                        .toTypedArray()
        )

        val samplesPerRecord = header.numberOfSamples.sum()

        val ch: ReadableByteChannel = Channels.newChannel(stream)
        val bytebuf = ByteBuffer.allocate(samplesPerRecord * 2) // 2 bytes per 16-bit sample
        bytebuf.order(ByteOrder.LITTLE_ENDIAN)

        for (i in 0 until header.numberOfRecords) {
            bytebuf.rewind()
            // FIX: ReadableByteChannel.read() may return before the buffer is
            // full (partial read); the previous single read() could leave part
            // of a record unread and desynchronize every following record.
            // Loop until the record is complete or the stream ends.
            while (bytebuf.hasRemaining()) {
                if (ch.read(bytebuf) < 0) break // EOF: remaining samples stay zero
            }
            bytebuf.rewind()

            for (j in 0 until header.numberOfChannels) {
                for (k in 0 until header.numberOfSamples[j]) {
                    val s: Int = header.numberOfSamples[j] * i + k
                    signal.digitalValues[j][s] = bytebuf.short
                    signal.valuesInUnits[j][s] = signal.digitalValues[j][s] * signal.unitsInDigit[j]
                }
            }
        }

        return signal
    }
}
9 changes: 9 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/dto/EdfFile.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
package com.npwork.edfparser.dto

/**
 * A fully parsed EDF recording: the decoded header plus the sample data.
 */
data class EdfFile(
        val header: EdfHeader,
        val signal: EdfSignal
) {
    /** Total number of stored digital samples, one entry per channel. */
    val samples: List<Int> = List(header.numberOfChannels) { channel ->
        signal.digitalValues[channel].size
    }

    /** Per-channel sample count divided by the record duration from the header. */
    val sampleRate: List<Double> = samples.map { sampleCount ->
        sampleCount / header.durationOfRecords
    }
}
26 changes: 26 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/dto/EdfHeader.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package com.npwork.edfparser.dto

/**
 * Decoded EDF header. The first ten properties describe the recording as a
 * whole; the list-valued properties hold one entry per channel, each of size
 * [numberOfChannels].
 */
data class EdfHeader(
        val idCode: String,
        val subjectID: String,
        val recordingID: String,
        val startDate: String,
        val startTime: String,
        val bytesInHeader: Int,
        val formatVersion: String,
        val numberOfRecords: Int,
        val durationOfRecords: Double,
        val numberOfChannels: Int,

        // Channel info
        val channelLabels: List<String>,
        val transducerTypes: List<String>,
        val dimensions: List<String>,
        val minInUnits: List<Double>,
        val maxInUnits: List<Double>,
        val digitalMin: List<Int>,
        val digitalMax: List<Int>,
        val prefilterings: List<String>,
        val numberOfSamples: List<Int>,
        // NOTE(review): ByteArray uses reference equality, so the generated
        // equals()/hashCode() of this data class compare `reserveds` by
        // identity, not content — two headers parsed from identical bytes
        // will not be equal. Confirm whether that matters to callers.
        val reserveds: List<ByteArray>
)
27 changes: 27 additions & 0 deletions src/main/kotlin/com/npwork/edfparser/dto/EdfSignal.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
package com.npwork.edfparser.dto

/**
 * Decoded sample data, indexed channel-first.
 *
 * Because the properties are arrays, equals/hashCode are overridden to use
 * content-based (deep) comparison instead of the generated reference-based one.
 */
data class EdfSignal(
        var unitsInDigit: Array<Double>,
        var digitalValues: Array<ShortArray>,
        var valuesInUnits: Array<DoubleArray>
) {
    /** Structural equality over array contents (class is final, so `is` suffices). */
    override fun equals(other: Any?): Boolean =
            this === other || (other is EdfSignal
                    && unitsInDigit.contentEquals(other.unitsInDigit)
                    && digitalValues.contentDeepEquals(other.digitalValues)
                    && valuesInUnits.contentDeepEquals(other.valuesInUnits))

    /** Standard 31-based fold over the three content hashes. */
    override fun hashCode(): Int =
            listOf(
                    unitsInDigit.contentHashCode(),
                    digitalValues.contentDeepHashCode(),
                    valuesInUnits.contentDeepHashCode()
            ).reduce { acc, part -> 31 * acc + part }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
package com.npwork.edfparser.extensions

import com.npwork.edfparser.EDFConstants
import java.io.InputStream

/**
 * Reads exactly [length] bytes from the stream, looping over partial reads.
 * If the stream ends early, the unread tail of the array stays zero-filled
 * (same observable result as the previous single-read version on EOF).
 *
 * FIX: the previous implementation called read(data) once and ignored its
 * return value; InputStream.read may legally return fewer bytes than
 * requested, which would silently zero-pad the middle of a field.
 *
 * NOTE(review): on JDK 11+ the member InputStream.readNBytes(Int) shadows
 * this extension at Kotlin call sites — confirm the intended JDK target.
 */
fun InputStream.readNBytes(length: Int): ByteArray {
    val data = ByteArray(length)
    var offset = 0
    while (offset < length) {
        val count = this.read(data, offset, length - offset)
        if (count < 0) break // EOF reached before the field was complete
        offset += count
    }
    return data
}

/** Reads [length] bytes and decodes them as trimmed ASCII text. */
fun InputStream.readASCII(length: Int): String = String(readNBytes(length), EDFConstants.CHARSET).trim()

/** Reads [times] consecutive fixed-width ASCII fields. */
fun InputStream.readASCIIBulk(length: Int, times: Int): List<String> = (1..times).map { this.readASCII(length) }

/** Reads [times] consecutive ASCII fields parsed as doubles (readASCII already trims). */
fun InputStream.readASCIIBulkDouble(length: Int, times: Int): List<Double> = (1..times).map { this.readASCII(length).toDouble() }

/** Reads [times] consecutive ASCII fields parsed as ints (readASCII already trims). */
fun InputStream.readASCIIBulkInt(length: Int, times: Int): List<Int> = (1..times).map { this.readASCII(length).toInt() }
65 changes: 65 additions & 0 deletions src/test/kotlin/com/npwork/edfparser/EdfParserTest.kt
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
package com.npwork.edfparser

import com.npwork.edfparser.dto.EdfFile
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Assertions.assertThrows
import org.junit.jupiter.api.DisplayName
import org.junit.jupiter.api.Nested
import org.junit.jupiter.api.Test

class EdfParserTest {
    @Test
    @DisplayName("EEG 38 channels")
    fun eegFile_38_channels() {
        val parsed = EdfParser.parse(getResource("teuniz_net_test_file_eeg_38_ch.edf"))

        assertEquals(9984, parsed.header.bytesInHeader)
        verifyNumberOfChannels(38, parsed)
    }

    @Test
    @DisplayName("Short ECG")
    fun short_ecg() {
        val parsed = EdfParser.parse(getResource("short_ecg.edf"))

        assertEquals(512, parsed.header.bytesInHeader)
        assertEquals(listOf(7684), parsed.samples)
        assertEquals(listOf(256.0000426444631), parsed.sampleRate)
        verifyNumberOfChannels(1, parsed)
    }

    @DisplayName("Wrong cases")
    @Nested
    inner class WrongCases {
        @Test
        @DisplayName("Empty file")
        fun emptyFile() {
            assertThrows(EdfFormatException.EmptyFile::class.java) {
                EdfParser.parse(getResource("empty_file.edf"))
            }
        }

        @Test
        @DisplayName("Header is not complete")
        fun partialHeader() {
            assertThrows(EdfFormatException.WrongHeader::class.java) {
                EdfParser.parse(getResource("partial_header.edf"))
            }
        }

        // NOTE(review): despite the name, this expects WrongHeader — the
        // parser wraps format failures raised during header parsing into
        // WrongHeader, so that is the observable exception type.
        @Test
        @DisplayName("From file format")
        fun partialSignal() {
            assertThrows(EdfFormatException.WrongHeader::class.java) {
                EdfParser.parse(getResource("wrong_file.edf"))
            }
        }
    }

    // Both per-channel lists must have one entry per expected channel.
    private fun verifyNumberOfChannels(expected: Int, edfFile: EdfFile) {
        assertEquals(expected, edfFile.header.channelLabels.size)
        assertEquals(expected, edfFile.header.numberOfSamples.size)
    }

    // Loads a fixture from the test classpath root.
    private fun getResource(fileName: String) = EdfParserTest::class.java.getResourceAsStream("/$fileName")
}
Empty file.
Loading

0 comments on commit fdd5849

Please sign in to comment.