Mirror of https://github.com/Kunzisoft/KeePassDX.git (synced 2025-12-04 15:49:33 +01:00)
Merge branch 'feature/Dynamic_Memory' into feature/Dynamic_Memory_And_Encrypt_Module
@@ -205,7 +205,7 @@ class DatabaseInputKDBX(cacheDirectory: File,
         }
 
         if (mDatabase.kdbxVersion.toKotlinLong() >= DatabaseHeaderKDBX.FILE_VERSION_32_4.toKotlinLong()) {
-            loadInnerHeader(inputStreamXml, header)
+            readInnerHeader(inputStreamXml, header)
         }
 
         try {
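
The call above only happens for KDBX 4.x databases, which place an inner header before the XML payload. A minimal sketch of that version gate, assuming the usual KDBX convention of packing the format version as major in the high 16 bits and minor in the low 16 bits; the constant value and helper below are illustrative, not the project's actual API:

// Illustrative only: KDBX packs its format version into 32 bits (major.minor).
// 0x00040000 is the conventional value for KDBX 4.0; the real constant lives in
// DatabaseHeaderKDBX.FILE_VERSION_32_4.
const val FILE_VERSION_32_4: Long = 0x00040000L

fun hasInnerHeader(kdbxVersion: Long): Boolean =
        kdbxVersion >= FILE_VERSION_32_4

fun main() {
    println(hasInnerHeader(0x00030001L)) // false: KDBX 3.1 has no inner header
    println(hasInnerHeader(0x00040000L)) // true: KDBX 4.0 stores an inner header before the XML
}
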
@@ -237,57 +237,56 @@ class DatabaseInputKDBX(cacheDirectory: File,
     }
 
     @Throws(IOException::class)
-    private fun loadInnerHeader(inputStream: InputStream, header: DatabaseHeaderKDBX) {
-        val lis = LittleEndianDataInputStream(inputStream)
-
-        while (true) {
-            if (!readInnerHeader(lis, header)) break
-        }
-    }
-
-    @Throws(IOException::class)
-    private fun readInnerHeader(dataInputStream: LittleEndianDataInputStream,
-                                header: DatabaseHeaderKDBX): Boolean {
-        val fieldId = dataInputStream.read().toByte()
-
-        val size = dataInputStream.readUInt().toKotlinInt()
-        if (size < 0) throw IOException("Corrupted file")
-
-        var data = ByteArray(0)
-        if (size > 0) {
-            if (fieldId != DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.Binary) {
-                // TODO OOM here
-                data = dataInputStream.readBytes(size)
-            }
-        }
-
-        var result = true
-        when (fieldId) {
-            DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.EndOfHeader -> {
-                result = false
-            }
-            DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.InnerRandomStreamID -> {
-                header.setRandomStreamID(data)
-            }
-            DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.InnerRandomstreamKey -> {
-                header.innerRandomStreamKey = data
-            }
-            DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.Binary -> {
-                // Read in a file
-                val protectedFlag = dataInputStream.read().toByte() == DatabaseHeaderKDBX.KdbxBinaryFlags.Protected
-                val byteLength = size - 1
-                // No compression at this level
-                val protectedBinary = mDatabase.buildNewAttachment(
-                        isRAMSufficient.invoke(byteLength.toLong()), false, protectedFlag)
-                protectedBinary.getOutputDataStream(mDatabase.binaryCache).use { outputStream ->
-                    dataInputStream.readBytes(byteLength) { buffer ->
-                        outputStream.write(buffer)
-                    }
-                }
-            }
-        }
-
-        return result
+    private fun readInnerHeader(inputStream: InputStream,
+                                header: DatabaseHeaderKDBX) {
+
+        val dataInputStream = LittleEndianDataInputStream(inputStream)
+
+        var readStream = true
+        while (readStream) {
+            val fieldId = dataInputStream.read().toByte()
+
+            val size = dataInputStream.readUInt().toKotlinInt()
+            if (size < 0) throw IOException("Corrupted file")
+
+            var data = ByteArray(0)
+            try {
+                if (size > 0) {
+                    if (fieldId != DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.Binary) {
+                        data = dataInputStream.readBytes(size)
+                    }
+                }
+            } catch (e: Exception) {
+                // OOM only if corrupted file
+                throw IOException("Corrupted file")
+            }
+
+            readStream = true
+            when (fieldId) {
+                DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.EndOfHeader -> {
+                    readStream = false
+                }
+                DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.InnerRandomStreamID -> {
+                    header.setRandomStreamID(data)
+                }
+                DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.InnerRandomstreamKey -> {
+                    header.innerRandomStreamKey = data
+                }
+                DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.Binary -> {
+                    // Read in a file
+                    val protectedFlag = dataInputStream.read().toByte() == DatabaseHeaderKDBX.KdbxBinaryFlags.Protected
+                    val byteLength = size - 1
+                    // No compression at this level
+                    val protectedBinary = mDatabase.buildNewAttachment(
+                            isRAMSufficient.invoke(byteLength.toLong()), false, protectedFlag)
+                    protectedBinary.getOutputDataStream(mDatabase.binaryCache).use { outputStream ->
+                        dataInputStream.readBytes(byteLength) { buffer ->
+                            outputStream.write(buffer)
+                        }
+                    }
+                }
+            }
+        }
     }
 
     private enum class KdbContext {
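
The net effect of this hunk: the old loop in loadInnerHeader and the Boolean-returning per-field reader are folded into a single readInnerHeader driven by a readStream flag, a corrupted size now surfaces as IOException instead of an unguarded allocation failure, and Binary payloads are still streamed in chunks into a cache-backed attachment rather than materialised as one large array. A self-contained sketch of that loop under simplified assumptions (made-up field-id constants, a plain big-endian DataInputStream, and an in-memory output stream standing in for the attachment cache; none of these are the project's real types):

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.DataInputStream
import java.io.DataOutputStream
import java.io.IOException

// Hypothetical stand-ins for DatabaseHeaderKDBX.PwDbInnerHeaderV4Fields.
const val END_OF_HEADER: Byte = 0
const val BINARY: Byte = 3

// Flag-driven loop in the spirit of the new readInnerHeader: read (id, size, payload)
// records until the end-of-header field, and copy Binary payloads in small chunks so a
// large attachment never needs a single huge byte array.
fun readRecords(input: DataInputStream) {
    var readStream = true
    while (readStream) {
        val fieldId = input.readByte()
        val size = input.readInt()          // the real code reads a little-endian UInt
        if (size < 0) throw IOException("Corrupted file")
        when (fieldId) {
            END_OF_HEADER -> readStream = false
            BINARY -> {
                val out = ByteArrayOutputStream()   // a cache/file-backed stream in the real code
                val buffer = ByteArray(4096)
                var remaining = size
                while (remaining > 0) {
                    val read = input.read(buffer, 0, minOf(buffer.size, remaining))
                    if (read == -1) throw IOException("Corrupted file")
                    out.write(buffer, 0, read)
                    remaining -= read
                }
                println("binary field: ${out.size()} bytes")
            }
            else -> {
                // Small fields are read whole; the commit wraps this allocation and
                // rethrows any failure as IOException("Corrupted file").
                val data = ByteArray(size)
                input.readFully(data)
            }
        }
    }
}

fun main() {
    // Build a tiny record stream: one 10 KiB Binary field followed by EndOfHeader.
    val raw = ByteArrayOutputStream()
    DataOutputStream(raw).apply {
        writeByte(BINARY.toInt()); writeInt(10_240); write(ByteArray(10_240))
        writeByte(END_OF_HEADER.toInt()); writeInt(0)
    }
    readRecords(DataInputStream(ByteArrayInputStream(raw.toByteArray())))
}
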