feat: at_persistence_secondary_server: Add "PublicKeyHash" to the "AtMetadata" and run dart formatter
sitaram-kalluri committed Nov 25, 2024
1 parent 61e599c commit d0e75f7
Showing 10 changed files with 95 additions and 37 deletions.
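The substance of the change: AtMetaData gains an optional pubKeyHash field carrying the hash of the encryption public key, a PublicKeyHashAdapater is registered with Hive so the field persists, and AtMetadataBuilder accepts a new publicKeyHash parameter. Below is a minimal usage sketch (not part of the commit); it assumes PublicKeyHash comes from at_commons ^5.0.2 with the positional (hash, hashingAlgo) constructor used in the test below, and that AtData, AtMetaData and AtMetadataBuilder are exported by the package barrel as the diffs that follow suggest.

// Hypothetical sketch; identifiers mirror the diff, the wiring around them is assumed.
import 'package:at_commons/at_commons.dart';
import 'package:at_persistence_secondary_server/at_persistence_secondary_server.dart';

void main() {
  // Hash of the encryption public key, e.g. a SHA-512 digest.
  var pubKeyHash = PublicKeyHash('someHashValue', 'sha512');

  // publicKeyHash is the builder parameter introduced in this commit.
  AtMetaData atMetaData = AtMetadataBuilder(publicKeyHash: pubKeyHash).build();

  // Attach the metadata to a value before storing it in the keystore.
  var atData = AtData()
    ..data = 'india'
    ..metaData = atMetaData;

  print(atData.metaData?.pubKeyHash?.hash); // someHashValue
}

Once stored, the hash round-trips through Hive via the new PublicKeyHashAdapater and through JSON via the AtConstants.sharedWithPublicKeyHash entry, both added in the diffs below.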
6 changes: 6 additions & 0 deletions packages/at_persistence_secondary_server/CHANGELOG.md
@@ -1,3 +1,9 @@
+ ## 3.0.66
+ - feat: Add "PublicKeyHash" to the "AtMetadata" which holds the hash value of encryption public key
+ - build[deps]: Upgraded the following packages:
+   - at_commons to v5.0.2
+   - lints to v5.0.0
+   - test to v1.25.8
## 3.0.65
- fix: Modified checks in commit log keystore _alwaysIncludeInSync method to match only reserved shared_key,
encryption public key and public key without namespace.
@@ -26,7 +26,8 @@ class AtConfig {
persistenceManager = SecondaryPersistenceStoreFactory.getInstance()
.getSecondaryPersistenceStore(_atSign)!
.getHivePersistenceManager()!;
- configKey = HiveKeyStoreHelper.getInstance().prepareKey('private:blocklist$_atSign');
+ configKey = HiveKeyStoreHelper.getInstance()
+     .prepareKey('private:blocklist$_atSign');
}

///Returns 'success' on adding unique [blockList] into blocklist.
@@ -31,6 +31,9 @@ class HivePersistenceManager with HiveBase {
if (!Hive.isAdapterRegistered(AtMetaDataAdapter().typeId)) {
Hive.registerAdapter(AtMetaDataAdapter());
}
+ if (!Hive.isAdapterRegistered(PublicKeyHashAdapater().typeId)) {
+   Hive.registerAdapter(PublicKeyHashAdapater());
+ }

var secret = await _getHiveSecretFromFile(_atsign!, storagePath);
_boxName = AtUtils.getShaForAtSign(_atsign!);
@@ -113,10 +116,11 @@ class HivePersistenceManager with HiveBase {
}

//TODO change into to Duration and construct cron string dynamically
- void scheduleKeyExpireTask(int? runFrequencyMins, {Duration? runTimeInterval, bool skipCommits = false}) {
+ void scheduleKeyExpireTask(int? runFrequencyMins,
+     {Duration? runTimeInterval, bool skipCommits = false}) {
logger.finest('scheduleKeyExpireTask starting cron job.');
Schedule schedule;
- if(runTimeInterval != null){
+ if (runTimeInterval != null) {
schedule = Schedule(seconds: runTimeInterval.inSeconds);
} else {
schedule = Schedule.parse('*/$runFrequencyMins * * * *');
@@ -77,6 +77,9 @@ class AtMetaData extends HiveObject {
@HiveField(23)
String? skeEncAlgo;

+ @HiveField(24)
+ PublicKeyHash? pubKeyHash;

@override
String toString() {
return toJson().toString();
@@ -100,7 +103,8 @@ class AtMetaData extends HiveObject {
..encAlgo = encAlgo
..ivNonce = ivNonce
..skeEncKeyName = skeEncKeyName
- ..skeEncAlgo = skeEncAlgo;
+ ..skeEncAlgo = skeEncAlgo
+ ..pubKeyHash = pubKeyHash;
}

factory AtMetaData.fromCommonsMetadata(Metadata metadata) {
@@ -120,7 +124,8 @@
..encAlgo = metadata.encAlgo
..ivNonce = metadata.ivNonce
..skeEncKeyName = metadata.skeEncKeyName
- ..skeEncAlgo = metadata.skeEncAlgo;
+ ..skeEncAlgo = metadata.skeEncAlgo
+ ..pubKeyHash = metadata.pubKeyHash;
return AtMetadataBuilder(newAtMetaData: atMetadata).build();
}

@@ -151,6 +156,7 @@
map[AtConstants.ivOrNonce] = ivNonce;
map[AtConstants.sharedKeyEncryptedEncryptingKeyName] = skeEncKeyName;
map[AtConstants.sharedKeyEncryptedEncryptingAlgo] = skeEncAlgo;
+ map[AtConstants.sharedWithPublicKeyHash] = pubKeyHash?.toJson();
return map;
}

@@ -205,6 +211,8 @@
ivNonce = json[AtConstants.ivOrNonce];
skeEncKeyName = json[AtConstants.sharedKeyEncryptedEncryptingKeyName];
skeEncAlgo = json[AtConstants.sharedKeyEncryptedEncryptingAlgo];
+ pubKeyHash =
+     PublicKeyHash.fromJson(json[AtConstants.sharedWithPublicKeyHash]);

return this;
}
@@ -301,13 +309,14 @@ class AtMetaDataAdapter extends TypeAdapter<AtMetaData> {
..encAlgo = fields[20]
..ivNonce = fields[21]
..skeEncKeyName = fields[22]
- ..skeEncAlgo = fields[23];
+ ..skeEncAlgo = fields[23]
+ ..pubKeyHash = fields[24];
}

@override
void write(BinaryWriter writer, AtMetaData obj) {
writer
- ..writeByte(24)
+ ..writeByte(25)
..writeByte(0)
..write(obj.createdBy)
..writeByte(1)
@@ -355,6 +364,33 @@ class AtMetaDataAdapter extends TypeAdapter<AtMetaData> {
..writeByte(22)
..write(obj.skeEncKeyName)
..writeByte(23)
- ..write(obj.skeEncAlgo);
+ ..write(obj.skeEncAlgo)
+ ..writeByte(24)
+ ..write(obj.pubKeyHash);
}
}

+ @HiveType(typeId: 11)
+ class PublicKeyHashAdapater extends TypeAdapter<PublicKeyHash> {
+   @override
+   final int typeId = typeAdapterMap['PublicKeyHashAdapater'];
+
+   @override
+   PublicKeyHash read(BinaryReader reader) {
+     var numOfFields = reader.readByte();
+     var fields = <int, dynamic>{
+       for (var i = 0; i < numOfFields; i++) reader.readByte(): reader.read(),
+     };
+     return PublicKeyHash(fields[0] as String, fields[1] as String);
+   }
+
+   @override
+   void write(BinaryWriter writer, PublicKeyHash obj) {
+     writer
+       ..writeByte(2)
+       ..writeByte(0)
+       ..write(obj.hash)
+       ..writeByte(1)
+       ..write(obj.hashingAlgo);
+   }
+ }
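The PublicKeyHash model that this adapter serialises is not part of the diff; it ships in at_commons (bumped to ^5.0.2 below). Based on the adapter above, which casts both fields to String, and the toJson()/fromJson() calls in the AtMetaData changes, its shape is presumably along the lines of this sketch; the JSON key names and the null handling are assumptions.

// Assumed shape of PublicKeyHash; the real class lives in at_commons ^5.0.2.
class PublicKeyHash {
  final String hash;        // hash of the encryption public key
  final String hashingAlgo; // e.g. 'sha512'

  PublicKeyHash(this.hash, this.hashingAlgo);

  Map<String, dynamic> toJson() => {'hash': hash, 'hashingAlgo': hashingAlgo};

  // AtMetaData.fromJson above may pass null when the key is absent,
  // so a nullable return keeps pubKeyHash optional.
  static PublicKeyHash? fromJson(dynamic json) {
    if (json == null) return null;
    return PublicKeyHash(json['hash'] as String, json['hashingAlgo'] as String);
  }
}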
@@ -1,3 +1,4 @@
+ import 'package:at_commons/at_commons.dart';
import 'package:at_persistence_secondary_server/at_persistence_secondary_server.dart';
import 'package:at_utils/at_logger.dart';

@@ -18,26 +19,26 @@ class AtMetadataBuilder {
/// ttb : Time to birth of the key. If ttb is null, atMetadata's ttb is assigned to ttb.
/// ttr : Time to refresh of the key. If ttr is null, atMetadata's ttr is assigned to ttr.
/// ccd : Cascade delete. If ccd is null, atMetadata's ccd is assigned to ccd.
- AtMetadataBuilder({
-   String? atSign,
-   AtMetaData? newAtMetaData,
-   AtMetaData? existingMetaData,
-   int? ttl,
-   int? ttb,
-   int? ttr,
-   bool? ccd,
-   bool? isBinary,
-   bool? isEncrypted,
-   String? dataSignature,
-   String? sharedKeyEncrypted,
-   String? publicKeyChecksum,
-   String? encoding,
-   String? encKeyName,
-   String? encAlgo,
-   String? ivNonce,
-   String? skeEncKeyName,
-   String? skeEncAlgo,
- }) {
+ AtMetadataBuilder(
+     {String? atSign,
+     AtMetaData? newAtMetaData,
+     AtMetaData? existingMetaData,
+     int? ttl,
+     int? ttb,
+     int? ttr,
+     bool? ccd,
+     bool? isBinary,
+     bool? isEncrypted,
+     String? dataSignature,
+     String? sharedKeyEncrypted,
+     String? publicKeyChecksum,
+     String? encoding,
+     String? encKeyName,
+     String? encAlgo,
+     String? ivNonce,
+     String? skeEncKeyName,
+     String? skeEncAlgo,
+     PublicKeyHash? publicKeyHash}) {
newAtMetaData ??= AtMetaData();
atMetaData = newAtMetaData;
// createdAt indicates the date and time of the key created.
Expand Down Expand Up @@ -85,6 +86,7 @@ class AtMetadataBuilder {
ivNonce ??= newAtMetaData.ivNonce;
skeEncKeyName ??= newAtMetaData.skeEncKeyName;
skeEncAlgo ??= newAtMetaData.skeEncAlgo;
+ publicKeyHash ??= newAtMetaData.pubKeyHash;

if (ttl != null && ttl >= 0) {
setTTL(ttl, ttb: ttb);
@@ -110,6 +112,7 @@
atMetaData.ivNonce = ivNonce;
atMetaData.skeEncKeyName = skeEncKeyName;
atMetaData.skeEncAlgo = skeEncAlgo;
+ atMetaData.pubKeyHash = publicKeyHash;
}

void setTTL(int? ttl, {int? ttb}) {
@@ -1,10 +1,10 @@
// ignore_for_file: non_constant_identifier_names

+ import 'package:at_persistence_secondary_server/at_persistence_secondary_server.dart';
+ import 'package:at_persistence_secondary_server/src/keystore/hive_base.dart';
import 'package:at_utf7/at_utf7.dart';
import 'package:at_utils/at_utils.dart';
import 'package:hive/hive.dart';
- import 'package:at_persistence_secondary_server/at_persistence_secondary_server.dart';
- import 'package:at_persistence_secondary_server/src/keystore/hive_base.dart';

/// Class to initialize, put and get entries into [AtNotificationKeystore]
class AtNotificationKeystore
@@ -42,6 +42,9 @@ class AtNotificationKeystore
if (!Hive.isAdapterRegistered(AtMetaDataAdapter().typeId)) {
Hive.registerAdapter(AtMetaDataAdapter());
}
+ if (!Hive.isAdapterRegistered(PublicKeyHashAdapater().typeId)) {
+   Hive.registerAdapter(PublicKeyHashAdapater());
+ }
_register = true;
}
await super.openBox(_boxName);
@@ -10,5 +10,6 @@ final Map typeAdapterMap = {
'NotificationTypeAdapter': 7,
'OperationTypeAdapter': 8,
'NotificationPriorityAdapter': 9,
- 'MessageTypeAdapter': 10
+ 'MessageTypeAdapter': 10,
+ 'PublicKeyHashAdapater': 11
};
8 changes: 4 additions & 4 deletions packages/at_persistence_secondary_server/pubspec.yaml
@@ -1,6 +1,6 @@
name: at_persistence_secondary_server
description: A Dart library with the implementation classes for the persistence layer of the secondary server.
- version: 3.0.65
+ version: 3.0.66
repository: https://github.com/atsign-foundation/at_server
homepage: https://docs.atsign.com/

@@ -14,13 +14,13 @@ dependencies:
crypto: ^3.0.3
uuid: ^3.0.6
at_utf7: ^1.0.0
- at_commons: ^5.0.1
+ at_commons: ^5.0.2
at_utils: ^3.0.19
at_persistence_spec: ^2.0.14
meta: ^1.8.0

dev_dependencies:
- lints: ^2.0.1
- test: ^1.22.1
+ lints: ^5.0.0
+ test: ^1.25.8
coverage: ^1.6.1
collection: ^1.17.1
@@ -137,7 +137,8 @@ Future<void> tearDownFunc() async {
// closes the instance of hive keystore
await SecondaryPersistenceStoreFactory.getInstance()
.getSecondaryPersistenceStore('@test_user_1')!
- .getHivePersistenceManager()?.close();
+ .getHivePersistenceManager()
+     ?.close();

var isExists = await Directory('test/hive/').exists();
if (isExists) {
@@ -84,14 +84,17 @@ void main() async {
..encAlgo = 'AES/CTR/PKCS7Padding'
..ivNonce = 'someIvNonce'
..skeEncKeyName = 'someSkeEncKeyName'
- ..skeEncAlgo = 'someSkeEncAlgo';
+ ..skeEncAlgo = 'someSkeEncAlgo'
+ ..pubKeyHash = PublicKeyHash('someHashValue', 'sha512');
var atMetaData = AtMetaData.fromCommonsMetadata(commonsMetadata);
atData.metaData = atMetaData;
await keyStore.create(key, atData);

var dataFromHive = await (keyStore.get(key));
expect(dataFromHive?.data, 'india');
expect(dataFromHive?.metaData, atMetaData);
+ expect(dataFromHive?.metaData?.pubKeyHash?.hash, 'someHashValue');
+ expect(dataFromHive?.metaData?.pubKeyHash?.hashingAlgo, 'sha512');

var updateData = AtData();
var updateMetaData =
