| node0 | 0.000ns | 2025-10-30 11:17:39.467 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node0 | 88.000ms | 2025-10-30 11:17:39.555 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node0 | 103.000ms | 2025-10-30 11:17:39.570 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node0 | 210.000ms | 2025-10-30 11:17:39.677 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [0] are set to run locally | |
| node0 | 238.000ms | 2025-10-30 11:17:39.705 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node1 | 257.000ms | 2025-10-30 11:17:39.724 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node1 | 344.000ms | 2025-10-30 11:17:39.811 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node2 | 357.000ms | 2025-10-30 11:17:39.824 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node1 | 359.000ms | 2025-10-30 11:17:39.826 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node2 | 444.000ms | 2025-10-30 11:17:39.911 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node2 | 460.000ms | 2025-10-30 11:17:39.927 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node1 | 466.000ms | 2025-10-30 11:17:39.933 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [1] are set to run locally | |
| node1 | 496.000ms | 2025-10-30 11:17:39.963 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node2 | 566.000ms | 2025-10-30 11:17:40.033 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [2] are set to run locally | |
| node2 | 595.000ms | 2025-10-30 11:17:40.062 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node0 | 1.390s | 2025-10-30 11:17:40.857 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1151ms | |
| node0 | 1.400s | 2025-10-30 11:17:40.867 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node0 | 1.404s | 2025-10-30 11:17:40.871 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node0 | 1.451s | 2025-10-30 11:17:40.918 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node0 | 1.535s | 2025-10-30 11:17:41.002 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node0 | 1.537s | 2025-10-30 11:17:41.004 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node2 | 1.738s | 2025-10-30 11:17:41.205 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1142ms | |
| node2 | 1.760s | 2025-10-30 11:17:41.227 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node2 | 1.766s | 2025-10-30 11:17:41.233 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node1 | 1.771s | 2025-10-30 11:17:41.238 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1274ms | |
| node1 | 1.780s | 2025-10-30 11:17:41.247 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node1 | 1.783s | 2025-10-30 11:17:41.250 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node2 | 1.814s | 2025-10-30 11:17:41.281 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node1 | 1.820s | 2025-10-30 11:17:41.287 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node2 | 1.878s | 2025-10-30 11:17:41.345 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node2 | 1.878s | 2025-10-30 11:17:41.345 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node1 | 1.890s | 2025-10-30 11:17:41.357 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node1 | 1.892s | 2025-10-30 11:17:41.359 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node3 | 2.280s | 2025-10-30 11:17:41.747 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node3 | 2.369s | 2025-10-30 11:17:41.836 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node3 | 2.386s | 2025-10-30 11:17:41.853 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node3 | 2.496s | 2025-10-30 11:17:41.963 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [3] are set to run locally | |
| node3 | 2.527s | 2025-10-30 11:17:41.994 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node4 | 2.713s | 2025-10-30 11:17:42.180 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node4 | 2.829s | 2025-10-30 11:17:42.296 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node4 | 2.848s | 2025-10-30 11:17:42.315 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node4 | 2.992s | 2025-10-30 11:17:42.459 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [4] are set to run locally | |
| node4 | 3.028s | 2025-10-30 11:17:42.495 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node0 | 3.528s | 2025-10-30 11:17:42.995 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node0 | 3.629s | 2025-10-30 11:17:43.096 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node0 | 3.632s | 2025-10-30 11:17:43.099 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | No saved states were found on disk. | |
| node0 | 3.667s | 2025-10-30 11:17:43.134 | 21 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node2 | 3.899s | 2025-10-30 11:17:43.366 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node1 | 3.904s | 2025-10-30 11:17:43.371 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node3 | 3.910s | 2025-10-30 11:17:43.377 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1383ms | |
| node3 | 3.919s | 2025-10-30 11:17:43.386 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node3 | 3.926s | 2025-10-30 11:17:43.393 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node3 | 3.975s | 2025-10-30 11:17:43.442 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node2 | 3.983s | 2025-10-30 11:17:43.450 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node2 | 3.985s | 2025-10-30 11:17:43.452 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | No saved states were found on disk. | |
| node1 | 3.991s | 2025-10-30 11:17:43.458 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node1 | 3.993s | 2025-10-30 11:17:43.460 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | No saved states were found on disk. | |
| node2 | 4.017s | 2025-10-30 11:17:43.484 | 21 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node1 | 4.026s | 2025-10-30 11:17:43.493 | 21 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node3 | 4.038s | 2025-10-30 11:17:43.505 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node3 | 4.040s | 2025-10-30 11:17:43.507 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node0 | 4.455s | 2025-10-30 11:17:43.922 | 24 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node0 | 4.457s | 2025-10-30 11:17:43.924 | 27 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software; current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node0 | 4.464s | 2025-10-30 11:17:43.931 | 28 | INFO | STARTUP | <main> | AddressBookInitializer: | Starting from genesis: using the config address book. | |
| node0 | 4.474s | 2025-10-30 11:17:43.941 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node0 | 4.477s | 2025-10-30 11:17:43.944 | 30 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node2 | 4.748s | 2025-10-30 11:17:44.215 | 24 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 4.748s | 2025-10-30 11:17:44.215 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1719ms | |
| node2 | 4.749s | 2025-10-30 11:17:44.216 | 26 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software; current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node2 | 4.755s | 2025-10-30 11:17:44.222 | 28 | INFO | STARTUP | <main> | AddressBookInitializer: | Starting from genesis: using the config address book. | |
| node4 | 4.758s | 2025-10-30 11:17:44.225 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node4 | 4.762s | 2025-10-30 11:17:44.229 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node2 | 4.764s | 2025-10-30 11:17:44.231 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node2 | 4.766s | 2025-10-30 11:17:44.233 | 30 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node1 | 4.779s | 2025-10-30 11:17:44.246 | 24 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node1 | 4.780s | 2025-10-30 11:17:44.247 | 26 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software; current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node1 | 4.785s | 2025-10-30 11:17:44.252 | 28 | INFO | STARTUP | <main> | AddressBookInitializer: | Starting from genesis: using the config address book. | |
| node1 | 4.794s | 2025-10-30 11:17:44.261 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node1 | 4.796s | 2025-10-30 11:17:44.263 | 30 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 4.806s | 2025-10-30 11:17:44.273 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node4 | 4.875s | 2025-10-30 11:17:44.342 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node4 | 4.876s | 2025-10-30 11:17:44.343 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node0 | 5.586s | 2025-10-30 11:17:45.053 | 31 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26429000] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=266390, randomLong=-286727679921486005, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=10740, randomLong=2620747506944970357, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1398580, data=35, exception=null] OS Health Check Report - Complete (took 1022 ms) | |||||||||
| node0 | 5.615s | 2025-10-30 11:17:45.082 | 32 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node0 | 5.623s | 2025-10-30 11:17:45.090 | 33 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node0 | 5.624s | 2025-10-30 11:17:45.091 | 34 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node0 | 5.709s | 2025-10-30 11:17:45.176 | 35 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node0 | 5.733s | 2025-10-30 11:17:45.200 | 36 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/0/ConsistencyTestLog.csv | |
| node0 | 5.734s | 2025-10-30 11:17:45.201 | 37 | INFO | STARTUP | <main> | TransactionHandlingHistory: | No log file found. Starting without any previous history | |
| node0 | 5.745s | 2025-10-30 11:17:45.212 | 38 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 0 Timestamp: 1970-01-01T00:00:00Z Next consensus number: 0 Legacy running event hash: null Legacy running event mnemonic: null Rounds non-ancient: 0 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1 Root hash: 50e1f7af3dbb06725582ec8e0fa7008684f3548b84c370837717fad94446293af8830cf44bf0cbc6976fbedeb97017bf (root) VirtualMap state / bring-any-resist-soft | |||||||||
| node0 | 5.749s | 2025-10-30 11:17:45.216 | 40 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node2 | 5.880s | 2025-10-30 11:17:45.347 | 31 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26302899] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=179131, randomLong=8962630178631541604, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=11270, randomLong=-5580822213163821351, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1336641, data=35, exception=null] OS Health Check Report - Complete (took 1023 ms) | |||||||||
| node2 | 5.910s | 2025-10-30 11:17:45.377 | 32 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node1 | 5.913s | 2025-10-30 11:17:45.380 | 31 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26230186] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=222171, randomLong=-4228950205947661281, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=9030, randomLong=293473989985727425, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1316301, data=35, exception=null] OS Health Check Report - Complete (took 1022 ms) | |||||||||
| node2 | 5.918s | 2025-10-30 11:17:45.385 | 33 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node2 | 5.919s | 2025-10-30 11:17:45.386 | 34 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node1 | 5.941s | 2025-10-30 11:17:45.408 | 32 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node1 | 5.949s | 2025-10-30 11:17:45.416 | 33 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node1 | 5.950s | 2025-10-30 11:17:45.417 | 34 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node0 | 5.974s | 2025-10-30 11:17:45.441 | 41 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b | |
| node0 | 5.979s | 2025-10-30 11:17:45.446 | 42 | INFO | STARTUP | <platformForkJoinThread-2> | Shadowgraph: | Shadowgraph starting from expiration threshold 1 | |
| node0 | 5.983s | 2025-10-30 11:17:45.450 | 43 | INFO | STARTUP | <<start-node-0>> | ConsistencyTestingToolMain: | init called in Main for node 0. | |
| node0 | 5.984s | 2025-10-30 11:17:45.451 | 44 | INFO | STARTUP | <<start-node-0>> | SwirldsPlatform: | Starting platform 0 | |
| node0 | 5.985s | 2025-10-30 11:17:45.452 | 45 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node0 | 5.989s | 2025-10-30 11:17:45.456 | 46 | INFO | STARTUP | <<start-node-0>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node0 | 5.990s | 2025-10-30 11:17:45.457 | 47 | INFO | STARTUP | <<start-node-0>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node0 | 5.990s | 2025-10-30 11:17:45.457 | 48 | INFO | STARTUP | <<start-node-0>> | InputWireChecks: | All input wires have been bound. | |
| node0 | 5.992s | 2025-10-30 11:17:45.459 | 49 | WARN | STARTUP | <<start-node-0>> | PcesFileTracker: | No preconsensus event files available | |
| node0 | 5.993s | 2025-10-30 11:17:45.460 | 50 | INFO | STARTUP | <<start-node-0>> | SwirldsPlatform: | replaying preconsensus event stream starting at 0 | |
| node0 | 5.994s | 2025-10-30 11:17:45.461 | 51 | INFO | STARTUP | <<start-node-0>> | PcesReplayer: | Replayed 0 preconsensus events with max birth round -1. These events contained 0 transactions. 0 rounds reached consensus spanning 0.0 nanoseconds of consensus time. The latest round to reach consensus is round 0. Replay took 0.0 nanoseconds. | |
| node0 | 5.995s | 2025-10-30 11:17:45.462 | 52 | INFO | STARTUP | <<app: appMain 0>> | ConsistencyTestingToolMain: | run called in Main. | |
| node2 | 5.998s | 2025-10-30 11:17:45.465 | 35 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node0 | 5.999s | 2025-10-30 11:17:45.466 | 53 | INFO | PLATFORM_STATUS | <platformForkJoinThread-5> | StatusStateMachine: | Platform spent 195.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node0 | 6.004s | 2025-10-30 11:17:45.471 | 54 | INFO | PLATFORM_STATUS | <platformForkJoinThread-5> | StatusStateMachine: | Platform spent 4.0 ms in REPLAYING_EVENTS. Now in OBSERVING | |
| node2 | 6.020s | 2025-10-30 11:17:45.487 | 36 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/2/ConsistencyTestLog.csv | |
| node2 | 6.021s | 2025-10-30 11:17:45.488 | 37 | INFO | STARTUP | <main> | TransactionHandlingHistory: | No log file found. Starting without any previous history | |
| node1 | 6.030s | 2025-10-30 11:17:45.497 | 35 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node2 | 6.032s | 2025-10-30 11:17:45.499 | 38 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 0 Timestamp: 1970-01-01T00:00:00Z Next consensus number: 0 Legacy running event hash: null Legacy running event mnemonic: null Rounds non-ancient: 0 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1 Root hash: 50e1f7af3dbb06725582ec8e0fa7008684f3548b84c370837717fad94446293af8830cf44bf0cbc6976fbedeb97017bf (root) VirtualMap state / bring-any-resist-soft | |||||||||
| node2 | 6.035s | 2025-10-30 11:17:45.502 | 40 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node1 | 6.053s | 2025-10-30 11:17:45.520 | 36 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/1/ConsistencyTestLog.csv | |
| node1 | 6.054s | 2025-10-30 11:17:45.521 | 37 | INFO | STARTUP | <main> | TransactionHandlingHistory: | No log file found. Starting without any previous history | |
| node1 | 6.066s | 2025-10-30 11:17:45.533 | 38 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 0 Timestamp: 1970-01-01T00:00:00Z Next consensus number: 0 Legacy running event hash: null Legacy running event mnemonic: null Rounds non-ancient: 0 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1 Root hash: 50e1f7af3dbb06725582ec8e0fa7008684f3548b84c370837717fad94446293af8830cf44bf0cbc6976fbedeb97017bf (root) VirtualMap state / bring-any-resist-soft | |||||||||
| node1 | 6.069s | 2025-10-30 11:17:45.536 | 40 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node3 | 6.074s | 2025-10-30 11:17:45.541 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node3 | 6.173s | 2025-10-30 11:17:45.640 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node3 | 6.176s | 2025-10-30 11:17:45.643 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | No saved states were found on disk. | |
| node3 | 6.213s | 2025-10-30 11:17:45.680 | 21 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node2 | 6.228s | 2025-10-30 11:17:45.695 | 41 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b | |
| node2 | 6.232s | 2025-10-30 11:17:45.699 | 42 | INFO | STARTUP | <platformForkJoinThread-2> | Shadowgraph: | Shadowgraph starting from expiration threshold 1 | |
| node2 | 6.236s | 2025-10-30 11:17:45.703 | 43 | INFO | STARTUP | <<start-node-2>> | ConsistencyTestingToolMain: | init called in Main for node 2. | |
| node2 | 6.237s | 2025-10-30 11:17:45.704 | 44 | INFO | STARTUP | <<start-node-2>> | SwirldsPlatform: | Starting platform 2 | |
| node2 | 6.238s | 2025-10-30 11:17:45.705 | 45 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node2 | 6.241s | 2025-10-30 11:17:45.708 | 46 | INFO | STARTUP | <<start-node-2>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node2 | 6.243s | 2025-10-30 11:17:45.710 | 47 | INFO | STARTUP | <<start-node-2>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node2 | 6.243s | 2025-10-30 11:17:45.710 | 48 | INFO | STARTUP | <<start-node-2>> | InputWireChecks: | All input wires have been bound. | |
| node2 | 6.245s | 2025-10-30 11:17:45.712 | 49 | WARN | STARTUP | <<start-node-2>> | PcesFileTracker: | No preconsensus event files available | |
| node2 | 6.245s | 2025-10-30 11:17:45.712 | 50 | INFO | STARTUP | <<start-node-2>> | SwirldsPlatform: | replaying preconsensus event stream starting at 0 | |
| node2 | 6.247s | 2025-10-30 11:17:45.714 | 51 | INFO | STARTUP | <<start-node-2>> | PcesReplayer: | Replayed 0 preconsensus events with max birth round -1. These events contained 0 transactions. 0 rounds reached consensus spanning 0.0 nanoseconds of consensus time. The latest round to reach consensus is round 0. Replay took 0.0 nanoseconds. | |
| node2 | 6.248s | 2025-10-30 11:17:45.715 | 52 | INFO | STARTUP | <<app: appMain 2>> | ConsistencyTestingToolMain: | run called in Main. | |
| node2 | 6.250s | 2025-10-30 11:17:45.717 | 53 | INFO | PLATFORM_STATUS | <platformForkJoinThread-1> | StatusStateMachine: | Platform spent 163.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node2 | 6.255s | 2025-10-30 11:17:45.722 | 54 | INFO | PLATFORM_STATUS | <platformForkJoinThread-1> | StatusStateMachine: | Platform spent 4.0 ms in REPLAYING_EVENTS. Now in OBSERVING | |
| node1 | 6.283s | 2025-10-30 11:17:45.750 | 41 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b | |
| node1 | 6.288s | 2025-10-30 11:17:45.755 | 42 | INFO | STARTUP | <platformForkJoinThread-2> | Shadowgraph: | Shadowgraph starting from expiration threshold 1 | |
| node1 | 6.292s | 2025-10-30 11:17:45.759 | 43 | INFO | STARTUP | <<start-node-1>> | ConsistencyTestingToolMain: | init called in Main for node 1. | |
| node1 | 6.293s | 2025-10-30 11:17:45.760 | 44 | INFO | STARTUP | <<start-node-1>> | SwirldsPlatform: | Starting platform 1 | |
| node1 | 6.294s | 2025-10-30 11:17:45.761 | 45 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node1 | 6.297s | 2025-10-30 11:17:45.764 | 46 | INFO | STARTUP | <<start-node-1>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node1 | 6.298s | 2025-10-30 11:17:45.765 | 47 | INFO | STARTUP | <<start-node-1>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node1 | 6.299s | 2025-10-30 11:17:45.766 | 48 | INFO | STARTUP | <<start-node-1>> | InputWireChecks: | All input wires have been bound. | |
| node1 | 6.301s | 2025-10-30 11:17:45.768 | 49 | WARN | STARTUP | <<start-node-1>> | PcesFileTracker: | No preconsensus event files available | |
| node1 | 6.301s | 2025-10-30 11:17:45.768 | 50 | INFO | STARTUP | <<start-node-1>> | SwirldsPlatform: | replaying preconsensus event stream starting at 0 | |
| node1 | 6.303s | 2025-10-30 11:17:45.770 | 51 | INFO | STARTUP | <<start-node-1>> | PcesReplayer: | Replayed 0 preconsensus events with max birth round -1. These events contained 0 transactions. 0 rounds reached consensus spanning 0.0 nanoseconds of consensus time. The latest round to reach consensus is round 0. Replay took 0.0 nanoseconds. | |
| node1 | 6.305s | 2025-10-30 11:17:45.772 | 52 | INFO | STARTUP | <<app: appMain 1>> | ConsistencyTestingToolMain: | run called in Main. | |
| node1 | 6.306s | 2025-10-30 11:17:45.773 | 53 | INFO | PLATFORM_STATUS | <platformForkJoinThread-6> | StatusStateMachine: | Platform spent 184.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node1 | 6.311s | 2025-10-30 11:17:45.778 | 54 | INFO | PLATFORM_STATUS | <platformForkJoinThread-6> | StatusStateMachine: | Platform spent 3.0 ms in REPLAYING_EVENTS. Now in OBSERVING | |
| node4 | 6.993s | 2025-10-30 11:17:46.460 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node3 | 7.001s | 2025-10-30 11:17:46.468 | 24 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node3 | 7.003s | 2025-10-30 11:17:46.470 | 27 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software; current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node3 | 7.009s | 2025-10-30 11:17:46.476 | 28 | INFO | STARTUP | <main> | AddressBookInitializer: | Starting from genesis: using the config address book. | |
| node3 | 7.019s | 2025-10-30 11:17:46.486 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node3 | 7.021s | 2025-10-30 11:17:46.488 | 30 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 7.107s | 2025-10-30 11:17:46.574 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 7.111s | 2025-10-30 11:17:46.578 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | No saved states were found on disk. | |
| node4 | 7.161s | 2025-10-30 11:17:46.628 | 21 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node4 | 8.083s | 2025-10-30 11:17:47.550 | 24 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 8.087s | 2025-10-30 11:17:47.554 | 27 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software; current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node4 | 8.094s | 2025-10-30 11:17:47.561 | 28 | INFO | STARTUP | <main> | AddressBookInitializer: | Starting from genesis: using the config address book. | |
| node4 | 8.109s | 2025-10-30 11:17:47.576 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 8.113s | 2025-10-30 11:17:47.580 | 30 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node3 | 8.155s | 2025-10-30 11:17:47.622 | 31 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26260040] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=316910, randomLong=-2683833342649502631, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=16051, randomLong=-3125392671785946529, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1594960, data=35, exception=null] OS Health Check Report - Complete (took 1025 ms) | |||||||||
| node3 | 8.188s | 2025-10-30 11:17:47.655 | 32 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node3 | 8.197s | 2025-10-30 11:17:47.664 | 33 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node3 | 8.199s | 2025-10-30 11:17:47.666 | 34 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node3 | 8.288s | 2025-10-30 11:17:47.755 | 35 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node3 | 8.311s | 2025-10-30 11:17:47.778 | 36 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/3/ConsistencyTestLog.csv | |
| node3 | 8.312s | 2025-10-30 11:17:47.779 | 37 | INFO | STARTUP | <main> | TransactionHandlingHistory: | No log file found. Starting without any previous history | |
| node3 | 8.325s | 2025-10-30 11:17:47.792 | 38 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 0 Timestamp: 1970-01-01T00:00:00Z Next consensus number: 0 Legacy running event hash: null Legacy running event mnemonic: null Rounds non-ancient: 0 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1 Root hash: 50e1f7af3dbb06725582ec8e0fa7008684f3548b84c370837717fad94446293af8830cf44bf0cbc6976fbedeb97017bf (root) VirtualMap state / bring-any-resist-soft | |||||||||
| node3 | 8.329s | 2025-10-30 11:17:47.796 | 40 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node3 | 8.551s | 2025-10-30 11:17:48.018 | 41 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b | |
| node3 | 8.556s | 2025-10-30 11:17:48.023 | 42 | INFO | STARTUP | <platformForkJoinThread-2> | Shadowgraph: | Shadowgraph starting from expiration threshold 1 | |
| node3 | 8.561s | 2025-10-30 11:17:48.028 | 43 | INFO | STARTUP | <<start-node-3>> | ConsistencyTestingToolMain: | init called in Main for node 3. | |
| node3 | 8.562s | 2025-10-30 11:17:48.029 | 44 | INFO | STARTUP | <<start-node-3>> | SwirldsPlatform: | Starting platform 3 | |
| node3 | 8.563s | 2025-10-30 11:17:48.030 | 45 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node3 | 8.566s | 2025-10-30 11:17:48.033 | 46 | INFO | STARTUP | <<start-node-3>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node3 | 8.567s | 2025-10-30 11:17:48.034 | 47 | INFO | STARTUP | <<start-node-3>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node3 | 8.568s | 2025-10-30 11:17:48.035 | 48 | INFO | STARTUP | <<start-node-3>> | InputWireChecks: | All input wires have been bound. | |
| node3 | 8.570s | 2025-10-30 11:17:48.037 | 49 | WARN | STARTUP | <<start-node-3>> | PcesFileTracker: | No preconsensus event files available | |
| node3 | 8.570s | 2025-10-30 11:17:48.037 | 50 | INFO | STARTUP | <<start-node-3>> | SwirldsPlatform: | replaying preconsensus event stream starting at 0 | |
| node3 | 8.572s | 2025-10-30 11:17:48.039 | 51 | INFO | STARTUP | <<start-node-3>> | PcesReplayer: | Replayed 0 preconsensus events with max birth round -1. These events contained 0 transactions. 0 rounds reached consensus spanning 0.0 nanoseconds of consensus time. The latest round to reach consensus is round 0. Replay took 0.0 nanoseconds. | |
| node3 | 8.573s | 2025-10-30 11:17:48.040 | 52 | INFO | STARTUP | <<app: appMain 3>> | ConsistencyTestingToolMain: | run called in Main. | |
| node3 | 8.575s | 2025-10-30 11:17:48.042 | 53 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 185.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node3 | 8.579s | 2025-10-30 11:17:48.046 | 54 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 4.0 ms in REPLAYING_EVENTS. Now in OBSERVING | |
| node0 | 8.995s | 2025-10-30 11:17:48.462 | 55 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting0.csv' ] | |
| node0 | 8.997s | 2025-10-30 11:17:48.464 | 56 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node4 | 9.247s | 2025-10-30 11:17:48.714 | 31 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26156598] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=173310, randomLong=4832781441687121960, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=35260, randomLong=-4691410001716228068, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1612188, data=35, exception=null] OS Health Check Report - Complete (took 1029 ms) | |||||||||
| node2 | 9.249s | 2025-10-30 11:17:48.716 | 55 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting2.csv' ] | |
| node2 | 9.252s | 2025-10-30 11:17:48.719 | 56 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node4 | 9.286s | 2025-10-30 11:17:48.753 | 32 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node4 | 9.298s | 2025-10-30 11:17:48.765 | 33 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node4 | 9.300s | 2025-10-30 11:17:48.767 | 34 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node1 | 9.305s | 2025-10-30 11:17:48.772 | 55 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting1.csv' ] | |
| node1 | 9.307s | 2025-10-30 11:17:48.774 | 56 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node4 | 9.407s | 2025-10-30 11:17:48.874 | 35 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node4 | 9.438s | 2025-10-30 11:17:48.905 | 36 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/4/ConsistencyTestLog.csv | |
| node4 | 9.439s | 2025-10-30 11:17:48.906 | 37 | INFO | STARTUP | <main> | TransactionHandlingHistory: | No log file found. Starting without any previous history | |
| node4 | 9.458s | 2025-10-30 11:17:48.925 | 38 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 0 Timestamp: 1970-01-01T00:00:00Z Next consensus number: 0 Legacy running event hash: null Legacy running event mnemonic: null Rounds non-ancient: 0 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1 Root hash: 50e1f7af3dbb06725582ec8e0fa7008684f3548b84c370837717fad94446293af8830cf44bf0cbc6976fbedeb97017bf (root) VirtualMap state / bring-any-resist-soft | |||||||||
| node4 | 9.464s | 2025-10-30 11:17:48.931 | 40 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node4 | 9.728s | 2025-10-30 11:17:49.195 | 41 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b | |
| node4 | 9.735s | 2025-10-30 11:17:49.202 | 42 | INFO | STARTUP | <platformForkJoinThread-2> | Shadowgraph: | Shadowgraph starting from expiration threshold 1 | |
| node4 | 9.741s | 2025-10-30 11:17:49.208 | 43 | INFO | STARTUP | <<start-node-4>> | ConsistencyTestingToolMain: | init called in Main for node 4. | |
| node4 | 9.742s | 2025-10-30 11:17:49.209 | 44 | INFO | STARTUP | <<start-node-4>> | SwirldsPlatform: | Starting platform 4 | |
| node4 | 9.743s | 2025-10-30 11:17:49.210 | 45 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node4 | 9.747s | 2025-10-30 11:17:49.214 | 46 | INFO | STARTUP | <<start-node-4>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node4 | 9.748s | 2025-10-30 11:17:49.215 | 47 | INFO | STARTUP | <<start-node-4>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node4 | 9.748s | 2025-10-30 11:17:49.215 | 48 | INFO | STARTUP | <<start-node-4>> | InputWireChecks: | All input wires have been bound. | |
| node4 | 9.750s | 2025-10-30 11:17:49.217 | 49 | WARN | STARTUP | <<start-node-4>> | PcesFileTracker: | No preconsensus event files available | |
| node4 | 9.751s | 2025-10-30 11:17:49.218 | 50 | INFO | STARTUP | <<start-node-4>> | SwirldsPlatform: | replaying preconsensus event stream starting at 0 | |
| node4 | 9.754s | 2025-10-30 11:17:49.221 | 51 | INFO | STARTUP | <<start-node-4>> | PcesReplayer: | Replayed 0 preconsensus events with max birth round -1. These events contained 0 transactions. 0 rounds reached consensus spanning 0.0 nanoseconds of consensus time. The latest round to reach consensus is round 0. Replay took 0.0 nanoseconds. | |
| node4 | 9.756s | 2025-10-30 11:17:49.223 | 52 | INFO | STARTUP | <<app: appMain 4>> | ConsistencyTestingToolMain: | run called in Main. | |
| node4 | 9.758s | 2025-10-30 11:17:49.225 | 53 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 219.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node4 | 9.765s | 2025-10-30 11:17:49.232 | 54 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 5.0 ms in REPLAYING_EVENTS. Now in OBSERVING | |
| node3 | 11.573s | 2025-10-30 11:17:51.040 | 55 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting3.csv' ] | |
| node3 | 11.574s | 2025-10-30 11:17:51.041 | 56 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node4 | 12.756s | 2025-10-30 11:17:52.223 | 55 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting4.csv' ] | |
| node4 | 12.759s | 2025-10-30 11:17:52.226 | 56 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node0 | 16.092s | 2025-10-30 11:17:55.559 | 57 | INFO | PLATFORM_STATUS | <platformForkJoinThread-2> | StatusStateMachine: | Platform spent 10.1 s in OBSERVING. Now in CHECKING | |
| node2 | 16.344s | 2025-10-30 11:17:55.811 | 57 | INFO | PLATFORM_STATUS | <platformForkJoinThread-3> | StatusStateMachine: | Platform spent 10.1 s in OBSERVING. Now in CHECKING | |
| node1 | 16.400s | 2025-10-30 11:17:55.867 | 57 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 10.1 s in OBSERVING. Now in CHECKING | |
| node2 | 17.222s | 2025-10-30 11:17:56.689 | 59 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 1 created, will eventually be written to disk, for reason: FIRST_ROUND_AFTER_GENESIS | |
| node1 | 17.260s | 2025-10-30 11:17:56.727 | 59 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 1 created, will eventually be written to disk, for reason: FIRST_ROUND_AFTER_GENESIS | |
| node0 | 17.279s | 2025-10-30 11:17:56.746 | 58 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 1.2 s in CHECKING. Now in ACTIVE | |
| node0 | 17.280s | 2025-10-30 11:17:56.747 | 60 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 1 created, will eventually be written to disk, for reason: FIRST_ROUND_AFTER_GENESIS | |
| node3 | 17.519s | 2025-10-30 11:17:56.986 | 58 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 1 created, will eventually be written to disk, for reason: FIRST_ROUND_AFTER_GENESIS | |
| node3 | 17.532s | 2025-10-30 11:17:56.999 | 73 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 1 state to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/1 | |
| node4 | 17.533s | 2025-10-30 11:17:57.000 | 58 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 1 created, will eventually be written to disk, for reason: FIRST_ROUND_AFTER_GENESIS | |
| node3 | 17.534s | 2025-10-30 11:17:57.001 | 74 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node4 | 17.557s | 2025-10-30 11:17:57.024 | 73 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 1 state to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/1 | |
| node4 | 17.559s | 2025-10-30 11:17:57.026 | 74 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node2 | 17.608s | 2025-10-30 11:17:57.075 | 74 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 1 state to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/1 | |
| node2 | 17.609s | 2025-10-30 11:17:57.076 | 75 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node1 | 17.633s | 2025-10-30 11:17:57.100 | 74 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 1 state to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/1 | |
| node1 | 17.635s | 2025-10-30 11:17:57.102 | 75 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node0 | 17.641s | 2025-10-30 11:17:57.108 | 75 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 1 state to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/1 | |
| node0 | 17.644s | 2025-10-30 11:17:57.111 | 76 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node2 | 17.680s | 2025-10-30 11:17:57.147 | 91 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 1.3 s in CHECKING. Now in ACTIVE | |
| node1 | 17.709s | 2025-10-30 11:17:57.176 | 91 | INFO | PLATFORM_STATUS | <platformForkJoinThread-3> | StatusStateMachine: | Platform spent 1.3 s in CHECKING. Now in ACTIVE | |
| node3 | 17.795s | 2025-10-30 11:17:57.262 | 107 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node3 | 17.798s | 2025-10-30 11:17:57.265 | 108 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 1 Timestamp: 2025-10-30T11:17:55.754555433Z Next consensus number: 1 Legacy running event hash: 572aa7503d7ab60e134cbfb3597890c3c59c7fb968911040bb9f32f5c9bd447b603e767c0fa063ee7faa5f2f80e86d87 Legacy running event mnemonic: torch-bid-quantum-dinner Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1450302654 Root hash: abd8cca0002d72137184db81f4124c32e1fb82f546306afff528f7fc1e584b5fb4cae8edc59ce8ae1511ddb11d7de871 (root) VirtualMap state / lunar-hire-aisle-draw | |||||||||
| node3 | 17.840s | 2025-10-30 11:17:57.307 | 109 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 17.841s | 2025-10-30 11:17:57.308 | 110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 17.842s | 2025-10-30 11:17:57.309 | 111 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 17.843s | 2025-10-30 11:17:57.310 | 112 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
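The preconsensus event (PCES) files referenced in these STATE_TO_DISK entries all follow one filename pattern, e.g. `2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces`. The sketch below splits such a name into its visible fields; reading `seq`, `minr`, `maxr`, and `orgn` as sequence number, minimum round, maximum round, and origin is an assumption inferred from the field names, not something the log states.

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative parser for the PCES filenames seen in this log. The meaning of the
// numeric fields (sequence, min/max round, origin) is inferred from their names
// and is an assumption, not taken from platform documentation.
public class PcesFileNameParser {

    private static final Pattern NAME = Pattern.compile(
            "(?<timestamp>.+Z)_seq(?<seq>\\d+)_minr(?<minr>\\d+)_maxr(?<maxr>\\d+)_orgn(?<orgn>\\d+)\\.pces");

    public static void main(String[] args) {
        String fileName = "2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces";
        Matcher m = NAME.matcher(fileName);
        if (m.matches()) {
            System.out.println("timestamp = " + m.group("timestamp")); // ':' appears as '+' in the name
            System.out.println("sequence  = " + m.group("seq"));
            System.out.println("min round = " + m.group("minr"));
            System.out.println("max round = " + m.group("maxr"));
            System.out.println("origin    = " + m.group("orgn"));
        }
    }
}
```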
| node4 | 17.846s | 2025-10-30 11:17:57.313 | 105 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node2 | 17.847s | 2025-10-30 11:17:57.314 | 110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node2 | 17.850s | 2025-10-30 11:17:57.317 | 111 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 1 Timestamp: 2025-10-30T11:17:55.754555433Z Next consensus number: 1 Legacy running event hash: 572aa7503d7ab60e134cbfb3597890c3c59c7fb968911040bb9f32f5c9bd447b603e767c0fa063ee7faa5f2f80e86d87 Legacy running event mnemonic: torch-bid-quantum-dinner Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1450302654 Root hash: abd8cca0002d72137184db81f4124c32e1fb82f546306afff528f7fc1e584b5fb4cae8edc59ce8ae1511ddb11d7de871 (root) VirtualMap state / lunar-hire-aisle-draw | |||||||||
| node4 | 17.850s | 2025-10-30 11:17:57.317 | 106 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 1 Timestamp: 2025-10-30T11:17:55.754555433Z Next consensus number: 1 Legacy running event hash: 572aa7503d7ab60e134cbfb3597890c3c59c7fb968911040bb9f32f5c9bd447b603e767c0fa063ee7faa5f2f80e86d87 Legacy running event mnemonic: torch-bid-quantum-dinner Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1450302654 Root hash: abd8cca0002d72137184db81f4124c32e1fb82f546306afff528f7fc1e584b5fb4cae8edc59ce8ae1511ddb11d7de871 (root) VirtualMap state / lunar-hire-aisle-draw | |||||||||
| node3 | 17.851s | 2025-10-30 11:17:57.318 | 113 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 1 to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/1 {"round":1,"freezeState":false,"reason":"FIRST_ROUND_AFTER_GENESIS","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/1/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 17.874s | 2025-10-30 11:17:57.341 | 110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node0 | 17.877s | 2025-10-30 11:17:57.344 | 109 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/1 for round 1 | |
| node1 | 17.877s | 2025-10-30 11:17:57.344 | 111 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 1 Timestamp: 2025-10-30T11:17:55.754555433Z Next consensus number: 1 Legacy running event hash: 572aa7503d7ab60e134cbfb3597890c3c59c7fb968911040bb9f32f5c9bd447b603e767c0fa063ee7faa5f2f80e86d87 Legacy running event mnemonic: torch-bid-quantum-dinner Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1450302654 Root hash: abd8cca0002d72137184db81f4124c32e1fb82f546306afff528f7fc1e584b5fb4cae8edc59ce8ae1511ddb11d7de871 (root) VirtualMap state / lunar-hire-aisle-draw | |||||||||
| node0 | 17.880s | 2025-10-30 11:17:57.347 | 110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 1 Timestamp: 2025-10-30T11:17:55.754555433Z Next consensus number: 1 Legacy running event hash: 572aa7503d7ab60e134cbfb3597890c3c59c7fb968911040bb9f32f5c9bd447b603e767c0fa063ee7faa5f2f80e86d87 Legacy running event mnemonic: torch-bid-quantum-dinner Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1450302654 Root hash: abd8cca0002d72137184db81f4124c32e1fb82f546306afff528f7fc1e584b5fb4cae8edc59ce8ae1511ddb11d7de871 (root) VirtualMap state / lunar-hire-aisle-draw | |||||||||
| node2 | 17.885s | 2025-10-30 11:17:57.352 | 112 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 17.885s | 2025-10-30 11:17:57.352 | 113 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 17.886s | 2025-10-30 11:17:57.353 | 114 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 17.887s | 2025-10-30 11:17:57.354 | 115 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 17.892s | 2025-10-30 11:17:57.359 | 116 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 1 to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/1 {"round":1,"freezeState":false,"reason":"FIRST_ROUND_AFTER_GENESIS","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/1/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 17.892s | 2025-10-30 11:17:57.359 | 107 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 17.893s | 2025-10-30 11:17:57.360 | 108 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 17.893s | 2025-10-30 11:17:57.360 | 109 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 17.894s | 2025-10-30 11:17:57.361 | 110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 17.902s | 2025-10-30 11:17:57.369 | 111 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 1 to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/1 {"round":1,"freezeState":false,"reason":"FIRST_ROUND_AFTER_GENESIS","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/1/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 17.913s | 2025-10-30 11:17:57.380 | 112 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 17.914s | 2025-10-30 11:17:57.381 | 111 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 17.914s | 2025-10-30 11:17:57.381 | 112 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 17.914s | 2025-10-30 11:17:57.381 | 113 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 17.914s | 2025-10-30 11:17:57.381 | 114 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 17.915s | 2025-10-30 11:17:57.382 | 113 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 17.916s | 2025-10-30 11:17:57.383 | 114 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 17.916s | 2025-10-30 11:17:57.383 | 115 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 17.921s | 2025-10-30 11:17:57.388 | 115 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 1 to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/1 {"round":1,"freezeState":false,"reason":"FIRST_ROUND_AFTER_GENESIS","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/1/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 17.921s | 2025-10-30 11:17:57.388 | 116 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 1 to disk. Reason: FIRST_ROUND_AFTER_GENESIS, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/1 {"round":1,"freezeState":false,"reason":"FIRST_ROUND_AFTER_GENESIS","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/1/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 18.669s | 2025-10-30 11:17:58.136 | 134 | INFO | PLATFORM_STATUS | <platformForkJoinThread-1> | StatusStateMachine: | Platform spent 10.1 s in OBSERVING. Now in CHECKING | |
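The durations reported by StatusStateMachine are consistent with the wall-clock timestamps in the log: node3 entered OBSERVING at 11:17:48.046 and entered CHECKING at 11:17:58.136, matching the reported "spent 10.1 s in OBSERVING". A quick check with `java.time`, using those two timestamps copied from the entries above:

```java
import java.time.Duration;
import java.time.LocalDateTime;

// Consistency check: the "Platform spent 10.1 s in OBSERVING" figure for node3
// should equal the gap between its OBSERVING and CHECKING transitions above.
public class StatusDurationCheck {
    public static void main(String[] args) {
        LocalDateTime enteredObserving = LocalDateTime.parse("2025-10-30T11:17:48.046");
        LocalDateTime enteredChecking  = LocalDateTime.parse("2025-10-30T11:17:58.136");
        Duration observing = Duration.between(enteredObserving, enteredChecking);
        System.out.println(observing.toMillis() + " ms"); // 10090 ms, i.e. roughly 10.1 s
    }
}
```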
| node4 | 19.850s | 2025-10-30 11:17:59.317 | 162 | INFO | PLATFORM_STATUS | <platformForkJoinThread-4> | StatusStateMachine: | Platform spent 10.1 s in OBSERVING. Now in CHECKING | |
| node3 | 20.180s | 2025-10-30 11:17:59.647 | 178 | INFO | PLATFORM_STATUS | <platformForkJoinThread-5> | StatusStateMachine: | Platform spent 1.5 s in CHECKING. Now in ACTIVE | |
| node4 | 21.408s | 2025-10-30 11:18:00.875 | 195 | INFO | PLATFORM_STATUS | <platformForkJoinThread-8> | StatusStateMachine: | Platform spent 1.6 s in CHECKING. Now in ACTIVE | |
| node1 | 22.209s | 2025-10-30 11:18:01.676 | 210 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 11 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 22.240s | 2025-10-30 11:18:01.707 | 209 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 11 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 22.274s | 2025-10-30 11:18:01.741 | 210 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 11 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 22.284s | 2025-10-30 11:18:01.751 | 209 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 11 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node4 | 22.320s | 2025-10-30 11:18:01.787 | 205 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 11 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 22.384s | 2025-10-30 11:18:01.851 | 214 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 11 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/11 | |
| node2 | 22.384s | 2025-10-30 11:18:01.851 | 215 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node1 | 22.412s | 2025-10-30 11:18:01.879 | 214 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 11 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/11 | |
| node1 | 22.413s | 2025-10-30 11:18:01.880 | 215 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node0 | 22.415s | 2025-10-30 11:18:01.882 | 213 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 11 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/11 | |
| node0 | 22.416s | 2025-10-30 11:18:01.883 | 214 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node3 | 22.429s | 2025-10-30 11:18:01.896 | 211 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 11 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/11 | |
| node3 | 22.432s | 2025-10-30 11:18:01.899 | 214 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node2 | 22.466s | 2025-10-30 11:18:01.933 | 258 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node2 | 22.468s | 2025-10-30 11:18:01.935 | 259 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 11 Timestamp: 2025-10-30T11:18:00.316739Z Next consensus number: 180 Legacy running event hash: b8f3a797009a94a05e7293ccb9c51c72baced454e48fdcc0a201b60e9e1b8143881767ff7cfa3616887173af541fa1fb Legacy running event mnemonic: ticket-artwork-battle-hire Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 397415135 Root hash: 63a81afe7796947754136983332c81b1fe6e959b6f2555f185b4dce82a2ca330cd5333796f8603c1423893ac26044903 (root) VirtualMap state / insect-garlic-version-illness | |||||||||
| node4 | 22.468s | 2025-10-30 11:18:01.935 | 209 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 11 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/11 | |
| node4 | 22.469s | 2025-10-30 11:18:01.936 | 210 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node2 | 22.477s | 2025-10-30 11:18:01.944 | 260 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 22.477s | 2025-10-30 11:18:01.944 | 261 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 22.478s | 2025-10-30 11:18:01.945 | 262 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 22.478s | 2025-10-30 11:18:01.945 | 263 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 22.479s | 2025-10-30 11:18:01.946 | 264 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 11 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/11 {"round":11,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/11/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 22.499s | 2025-10-30 11:18:01.966 | 247 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node0 | 22.502s | 2025-10-30 11:18:01.969 | 248 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 11 Timestamp: 2025-10-30T11:18:00.316739Z Next consensus number: 180 Legacy running event hash: b8f3a797009a94a05e7293ccb9c51c72baced454e48fdcc0a201b60e9e1b8143881767ff7cfa3616887173af541fa1fb Legacy running event mnemonic: ticket-artwork-battle-hire Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 397415135 Root hash: 63a81afe7796947754136983332c81b1fe6e959b6f2555f185b4dce82a2ca330cd5333796f8603c1423893ac26044903 (root) VirtualMap state / insect-garlic-version-illness | |||||||||
| node1 | 22.502s | 2025-10-30 11:18:01.969 | 258 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node1 | 22.504s | 2025-10-30 11:18:01.971 | 259 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 11 Timestamp: 2025-10-30T11:18:00.316739Z Next consensus number: 180 Legacy running event hash: b8f3a797009a94a05e7293ccb9c51c72baced454e48fdcc0a201b60e9e1b8143881767ff7cfa3616887173af541fa1fb Legacy running event mnemonic: ticket-artwork-battle-hire Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 397415135 Root hash: 63a81afe7796947754136983332c81b1fe6e959b6f2555f185b4dce82a2ca330cd5333796f8603c1423893ac26044903 (root) VirtualMap state / insect-garlic-version-illness | |||||||||
| node0 | 22.510s | 2025-10-30 11:18:01.977 | 249 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 22.511s | 2025-10-30 11:18:01.978 | 250 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 22.511s | 2025-10-30 11:18:01.978 | 251 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 22.512s | 2025-10-30 11:18:01.979 | 252 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 22.512s | 2025-10-30 11:18:01.979 | 253 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 11 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/11 {"round":11,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/11/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 22.516s | 2025-10-30 11:18:01.983 | 260 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 22.516s | 2025-10-30 11:18:01.983 | 261 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 22.517s | 2025-10-30 11:18:01.984 | 262 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 22.517s | 2025-10-30 11:18:01.984 | 263 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 22.518s | 2025-10-30 11:18:01.985 | 264 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 11 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/11 {"round":11,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/11/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 22.518s | 2025-10-30 11:18:01.985 | 247 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node3 | 22.521s | 2025-10-30 11:18:01.988 | 248 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 11 Timestamp: 2025-10-30T11:18:00.316739Z Next consensus number: 180 Legacy running event hash: b8f3a797009a94a05e7293ccb9c51c72baced454e48fdcc0a201b60e9e1b8143881767ff7cfa3616887173af541fa1fb Legacy running event mnemonic: ticket-artwork-battle-hire Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 397415135 Root hash: 63a81afe7796947754136983332c81b1fe6e959b6f2555f185b4dce82a2ca330cd5333796f8603c1423893ac26044903 (root) VirtualMap state / insect-garlic-version-illness | |||||||||
| node3 | 22.528s | 2025-10-30 11:18:01.995 | 249 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 22.528s | 2025-10-30 11:18:01.995 | 250 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 22.529s | 2025-10-30 11:18:01.996 | 251 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 22.529s | 2025-10-30 11:18:01.996 | 252 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 22.530s | 2025-10-30 11:18:01.997 | 253 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 11 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/11 {"round":11,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/11/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 22.579s | 2025-10-30 11:18:02.046 | 253 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/8 for round 11 | |
| node4 | 22.582s | 2025-10-30 11:18:02.049 | 254 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 11 Timestamp: 2025-10-30T11:18:00.316739Z Next consensus number: 180 Legacy running event hash: b8f3a797009a94a05e7293ccb9c51c72baced454e48fdcc0a201b60e9e1b8143881767ff7cfa3616887173af541fa1fb Legacy running event mnemonic: ticket-artwork-battle-hire Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 397415135 Root hash: 63a81afe7796947754136983332c81b1fe6e959b6f2555f185b4dce82a2ca330cd5333796f8603c1423893ac26044903 (root) VirtualMap state / insect-garlic-version-illness | |||||||||
| node4 | 22.596s | 2025-10-30 11:18:02.063 | 255 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 22.597s | 2025-10-30 11:18:02.064 | 256 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 1 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 22.599s | 2025-10-30 11:18:02.066 | 257 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 22.600s | 2025-10-30 11:18:02.067 | 258 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 22.601s | 2025-10-30 11:18:02.068 | 259 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 11 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/11 {"round":11,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/11/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 1m 22.036s | 2025-10-30 11:19:01.503 | 1664 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 139 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 1m 22.080s | 2025-10-30 11:19:01.547 | 1668 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 139 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 1m 22.097s | 2025-10-30 11:19:01.564 | 1687 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 139 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 1m 22.098s | 2025-10-30 11:19:01.565 | 1669 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 139 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node4 | 1m 22.108s | 2025-10-30 11:19:01.575 | 1689 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 139 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 1m 22.291s | 2025-10-30 11:19:01.758 | 1672 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 139 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/139 | |
| node0 | 1m 22.291s | 2025-10-30 11:19:01.758 | 1673 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node1 | 1m 22.296s | 2025-10-30 11:19:01.763 | 1667 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 139 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/139 | |
| node1 | 1m 22.297s | 2025-10-30 11:19:01.764 | 1668 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node3 | 1m 22.354s | 2025-10-30 11:19:01.821 | 1690 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 139 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/139 | |
| node3 | 1m 22.355s | 2025-10-30 11:19:01.822 | 1691 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node0 | 1m 22.369s | 2025-10-30 11:19:01.836 | 1704 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node0 | 1m 22.371s | 2025-10-30 11:19:01.838 | 1705 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 139 Timestamp: 2025-10-30T11:19:00.515854Z Next consensus number: 5018 Legacy running event hash: 815ea84c80477c94aa5a0f39e3f421d12684d1b62662d08d31e05ec6624a5d209ea8d149278da34f1f5b873b3c1147cb Legacy running event mnemonic: stairs-walk-warm-myth Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -36562392 Root hash: 5eedf8ba94a3e596b2aac70828947308bd7657dc7863a7f30d67c1b109f5900cf6f1b1ea31d41f97ac12f51b53ef10e2 (root) VirtualMap state / tank-trip-hill-essence | |||||||||
| node1 | 1m 22.371s | 2025-10-30 11:19:01.838 | 1703 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node1 | 1m 22.373s | 2025-10-30 11:19:01.840 | 1704 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 139 Timestamp: 2025-10-30T11:19:00.515854Z Next consensus number: 5018 Legacy running event hash: 815ea84c80477c94aa5a0f39e3f421d12684d1b62662d08d31e05ec6624a5d209ea8d149278da34f1f5b873b3c1147cb Legacy running event mnemonic: stairs-walk-warm-myth Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -36562392 Root hash: 5eedf8ba94a3e596b2aac70828947308bd7657dc7863a7f30d67c1b109f5900cf6f1b1ea31d41f97ac12f51b53ef10e2 (root) VirtualMap state / tank-trip-hill-essence | |||||||||
| node4 | 1m 22.377s | 2025-10-30 11:19:01.844 | 1692 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 139 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/139 | |
| node4 | 1m 22.379s | 2025-10-30 11:19:01.846 | 1693 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node0 | 1m 22.380s | 2025-10-30 11:19:01.847 | 1706 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 1m 22.380s | 2025-10-30 11:19:01.847 | 1707 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 112 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 1m 22.380s | 2025-10-30 11:19:01.847 | 1708 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 1m 22.380s | 2025-10-30 11:19:01.847 | 1705 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 1m 22.381s | 2025-10-30 11:19:01.848 | 1706 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 112 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 1m 22.381s | 2025-10-30 11:19:01.848 | 1707 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 1m 22.384s | 2025-10-30 11:19:01.851 | 1709 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 1m 22.384s | 2025-10-30 11:19:01.851 | 1708 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 1m 22.385s | 2025-10-30 11:19:01.852 | 1710 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 139 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/139 {"round":139,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/139/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 1m 22.385s | 2025-10-30 11:19:01.852 | 1709 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 139 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/139 {"round":139,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/139/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 1m 22.424s | 2025-10-30 11:19:01.891 | 1671 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 139 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/139 | |
| node2 | 1m 22.425s | 2025-10-30 11:19:01.892 | 1672 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node3 | 1m 22.439s | 2025-10-30 11:19:01.906 | 1730 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node3 | 1m 22.441s | 2025-10-30 11:19:01.908 | 1731 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 139 Timestamp: 2025-10-30T11:19:00.515854Z Next consensus number: 5018 Legacy running event hash: 815ea84c80477c94aa5a0f39e3f421d12684d1b62662d08d31e05ec6624a5d209ea8d149278da34f1f5b873b3c1147cb Legacy running event mnemonic: stairs-walk-warm-myth Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -36562392 Root hash: 5eedf8ba94a3e596b2aac70828947308bd7657dc7863a7f30d67c1b109f5900cf6f1b1ea31d41f97ac12f51b53ef10e2 (root) VirtualMap state / tank-trip-hill-essence | |||||||||
| node3 | 1m 22.451s | 2025-10-30 11:19:01.918 | 1732 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 1m 22.452s | 2025-10-30 11:19:01.919 | 1733 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 112 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 1m 22.452s | 2025-10-30 11:19:01.919 | 1734 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 1m 22.455s | 2025-10-30 11:19:01.922 | 1735 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 1m 22.456s | 2025-10-30 11:19:01.923 | 1736 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 139 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/139 {"round":139,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/139/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 1m 22.464s | 2025-10-30 11:19:01.931 | 1724 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node4 | 1m 22.468s | 2025-10-30 11:19:01.935 | 1725 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 139 Timestamp: 2025-10-30T11:19:00.515854Z Next consensus number: 5018 Legacy running event hash: 815ea84c80477c94aa5a0f39e3f421d12684d1b62662d08d31e05ec6624a5d209ea8d149278da34f1f5b873b3c1147cb Legacy running event mnemonic: stairs-walk-warm-myth Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -36562392 Root hash: 5eedf8ba94a3e596b2aac70828947308bd7657dc7863a7f30d67c1b109f5900cf6f1b1ea31d41f97ac12f51b53ef10e2 (root) VirtualMap state / tank-trip-hill-essence | |||||||||
| node4 | 1m 22.480s | 2025-10-30 11:19:01.947 | 1726 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 1m 22.480s | 2025-10-30 11:19:01.947 | 1727 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 112 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 1m 22.481s | 2025-10-30 11:19:01.948 | 1728 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 1m 22.484s | 2025-10-30 11:19:01.951 | 1729 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 1m 22.485s | 2025-10-30 11:19:01.952 | 1730 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 139 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/139 {"round":139,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/139/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 1m 22.501s | 2025-10-30 11:19:01.968 | 1711 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/15 for round 139 | |
| node2 | 1m 22.503s | 2025-10-30 11:19:01.970 | 1712 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 139 Timestamp: 2025-10-30T11:19:00.515854Z Next consensus number: 5018 Legacy running event hash: 815ea84c80477c94aa5a0f39e3f421d12684d1b62662d08d31e05ec6624a5d209ea8d149278da34f1f5b873b3c1147cb Legacy running event mnemonic: stairs-walk-warm-myth Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -36562392 Root hash: 5eedf8ba94a3e596b2aac70828947308bd7657dc7863a7f30d67c1b109f5900cf6f1b1ea31d41f97ac12f51b53ef10e2 (root) VirtualMap state / tank-trip-hill-essence | |||||||||
| node2 | 1m 22.512s | 2025-10-30 11:19:01.979 | 1713 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 1m 22.512s | 2025-10-30 11:19:01.979 | 1714 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 112 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 1m 22.512s | 2025-10-30 11:19:01.979 | 1715 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 1m 22.516s | 2025-10-30 11:19:01.983 | 1716 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 1m 22.517s | 2025-10-30 11:19:01.984 | 1717 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 139 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/139 {"round":139,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/139/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 2m 22.030s | 2025-10-30 11:20:01.497 | 3049 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 262 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 2m 22.038s | 2025-10-30 11:20:01.505 | 3039 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 262 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node4 | 2m 22.059s | 2025-10-30 11:20:01.526 | 3060 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 262 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 2m 22.114s | 2025-10-30 11:20:01.581 | 3061 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 262 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 2m 22.122s | 2025-10-30 11:20:01.589 | 3039 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 262 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 2m 22.259s | 2025-10-30 11:20:01.726 | 3066 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 262 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/262 | |
| node3 | 2m 22.260s | 2025-10-30 11:20:01.727 | 3069 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node0 | 2m 22.323s | 2025-10-30 11:20:01.790 | 3044 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 262 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/262 | |
| node0 | 2m 22.324s | 2025-10-30 11:20:01.791 | 3047 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node1 | 2m 22.329s | 2025-10-30 11:20:01.796 | 3047 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 262 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/262 | |
| node1 | 2m 22.329s | 2025-10-30 11:20:01.796 | 3048 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node2 | 2m 22.336s | 2025-10-30 11:20:01.803 | 3052 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 262 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/262 | |
| node2 | 2m 22.337s | 2025-10-30 11:20:01.804 | 3053 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node3 | 2m 22.337s | 2025-10-30 11:20:01.804 | 3104 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node3 | 2m 22.340s | 2025-10-30 11:20:01.807 | 3105 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node3 | 2m 22.346s | 2025-10-30 11:20:01.813 | 3106 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 2m 22.346s | 2025-10-30 11:20:01.813 | 3107 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 235 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 2m 22.347s | 2025-10-30 11:20:01.814 | 3108 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 2m 22.354s | 2025-10-30 11:20:01.821 | 3109 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 2m 22.354s | 2025-10-30 11:20:01.821 | 3110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 262 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/262 {"round":262,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/262/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 2m 22.402s | 2025-10-30 11:20:01.869 | 3083 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node1 | 2m 22.404s | 2025-10-30 11:20:01.871 | 3084 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node0 | 2m 22.405s | 2025-10-30 11:20:01.872 | 3098 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node0 | 2m 22.407s | 2025-10-30 11:20:01.874 | 3099 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node1 | 2m 22.410s | 2025-10-30 11:20:01.877 | 3085 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 2m 22.411s | 2025-10-30 11:20:01.878 | 3086 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 235 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 2m 22.411s | 2025-10-30 11:20:01.878 | 3087 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 2m 22.414s | 2025-10-30 11:20:01.881 | 3100 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 2m 22.414s | 2025-10-30 11:20:01.881 | 3101 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 235 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 2m 22.414s | 2025-10-30 11:20:01.881 | 3102 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 2m 22.416s | 2025-10-30 11:20:01.883 | 3091 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node1 | 2m 22.418s | 2025-10-30 11:20:01.885 | 3088 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 2m 22.418s | 2025-10-30 11:20:01.885 | 3089 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 262 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/262 {"round":262,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/262/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 2m 22.418s | 2025-10-30 11:20:01.885 | 3092 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node0 | 2m 22.421s | 2025-10-30 11:20:01.888 | 3103 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 2m 22.422s | 2025-10-30 11:20:01.889 | 3104 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 262 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/262 {"round":262,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/262/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 2m 22.425s | 2025-10-30 11:20:01.892 | 3066 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 262 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/262 | |
| node2 | 2m 22.426s | 2025-10-30 11:20:01.893 | 3093 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 2m 22.426s | 2025-10-30 11:20:01.893 | 3069 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node2 | 2m 22.427s | 2025-10-30 11:20:01.894 | 3094 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 235 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 2m 22.427s | 2025-10-30 11:20:01.894 | 3095 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 2m 22.434s | 2025-10-30 11:20:01.901 | 3096 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 2m 22.434s | 2025-10-30 11:20:01.901 | 3097 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 262 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/262 {"round":262,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/262/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 2m 22.520s | 2025-10-30 11:20:01.987 | 3104 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/22 for round 262 | |
| node4 | 2m 22.524s | 2025-10-30 11:20:01.991 | 3105 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node4 | 2m 22.534s | 2025-10-30 11:20:02.001 | 3106 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 2m 22.534s | 2025-10-30 11:20:02.001 | 3107 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 235 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node4 | 2m 22.535s | 2025-10-30 11:20:02.002 | 3108 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 2m 22.542s | 2025-10-30 11:20:02.009 | 3109 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 2m 22.543s | 2025-10-30 11:20:02.010 | 3110 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 262 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/262 {"round":262,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/262/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 3m 15.945s | 2025-10-30 11:20:55.412 | 4374 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith4 1 to 4>> | NetworkUtils: | Connection broken: 1 -> 4 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:20:55.410387254Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node2 | 3m 15.945s | 2025-10-30 11:20:55.412 | 4376 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith4 2 to 4>> | NetworkUtils: | Connection broken: 2 -> 4 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:20:55.410143914Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node3 | 3m 15.945s | 2025-10-30 11:20:55.412 | 4413 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith4 3 to 4>> | NetworkUtils: | Connection broken: 3 -> 4 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:20:55.410252807Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node0 | 3m 15.947s | 2025-10-30 11:20:55.414 | 4379 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith4 0 to 4>> | NetworkUtils: | Connection broken: 0 -> 4 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:20:55.410737503Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 
8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
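The trace above shows the RPC peer protocol's reader thread blocked in `DataInputStream.readShort(...)` when peer 4 reset the TCP connection ("Connection reset"), while the writer half of the same exchange failed in parallel with "Connection or outbound has closed"; node4 does not log again until its restart at 5m 58s below, which is consistent with these resets. As a minimal, self-contained sketch (not the platform's actual `RpcPeerProtocol`, and the address/port in `main` are purely illustrative), this is roughly where such an exception surfaces in a framed read loop over a socket:

```java
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;

/**
 * Minimal sketch of a framed reader loop over a peer socket. This is NOT the
 * platform's RpcPeerProtocol; it only illustrates why a peer that vanishes
 * mid-conversation shows up as SocketException("Connection reset") inside
 * DataInputStream.readShort(), as in the stack trace above.
 */
public final class PeerReaderSketch {

    /** Reads 2-byte message-type tags until the stream ends or the peer resets. */
    static void readMessages(final Socket socket) throws IOException {
        try (DataInputStream in = new DataInputStream(socket.getInputStream())) {
            while (true) {
                final short messageType = in.readShort(); // blocks here, like the trace above
                // ... dispatch on messageType; omitted in this sketch ...
            }
        } catch (final EOFException e) {
            // Peer closed the stream cleanly; treat as end of conversation.
        } catch (final SocketException e) {
            // "Connection reset" lands here when the peer's TCP stack aborts the
            // connection; the platform logs it under SOCKET_EXCEPTIONS and simply
            // re-establishes the connection later rather than treating it as fatal.
            System.err.println("connection broken: " + e.getMessage());
        }
    }

    public static void main(final String[] args) throws IOException {
        // Hypothetical local peer address, chosen only to make the sketch runnable.
        try (Socket socket = new Socket("127.0.0.1", 30124)) {
            readMessages(socket);
        }
    }
}
```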
| node0 | 3m 21.757s | 2025-10-30 11:21:01.224 | 4537 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 392 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 3m 21.796s | 2025-10-30 11:21:01.263 | 4571 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 392 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 3m 21.810s | 2025-10-30 11:21:01.277 | 4542 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 392 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 3m 21.825s | 2025-10-30 11:21:01.292 | 4532 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 392 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 3m 21.937s | 2025-10-30 11:21:01.404 | 4535 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 392 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/392 | |
| node1 | 3m 21.938s | 2025-10-30 11:21:01.405 | 4536 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node2 | 3m 21.940s | 2025-10-30 11:21:01.407 | 4545 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 392 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/392 | |
| node2 | 3m 21.941s | 2025-10-30 11:21:01.408 | 4546 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node0 | 3m 21.960s | 2025-10-30 11:21:01.427 | 4540 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 392 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/392 | |
| node0 | 3m 21.961s | 2025-10-30 11:21:01.428 | 4541 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node3 | 3m 22.008s | 2025-10-30 11:21:01.475 | 4574 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 392 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/392 | |
| node3 | 3m 22.009s | 2025-10-30 11:21:01.476 | 4575 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node1 | 3m 22.018s | 2025-10-30 11:21:01.485 | 4567 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node2 | 3m 22.018s | 2025-10-30 11:21:01.485 | 4577 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node1 | 3m 22.019s | 2025-10-30 11:21:01.486 | 4568 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 392 Timestamp: 2025-10-30T11:21:00.342545Z Next consensus number: 14489 Legacy running event hash: 8a227305f9ab683a9c6c4cff35746607e151211e96d58b92a753a2b11ab8766bee326820369f24410c79c5007e8ba18a Legacy running event mnemonic: rookie-gaze-skirt-also Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 25470378 Root hash: d2ca9930eaedce3a23aad4195b04ec75a5b4b79e407d3e64b0071f5cdffce100d9c81ba2350bb8de1e64757b4d7d03e1 (root) VirtualMap state / plug-sure-pen-decide | |||||||||
| node2 | 3m 22.020s | 2025-10-30 11:21:01.487 | 4578 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 392 Timestamp: 2025-10-30T11:21:00.342545Z Next consensus number: 14489 Legacy running event hash: 8a227305f9ab683a9c6c4cff35746607e151211e96d58b92a753a2b11ab8766bee326820369f24410c79c5007e8ba18a Legacy running event mnemonic: rookie-gaze-skirt-also Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 25470378 Root hash: d2ca9930eaedce3a23aad4195b04ec75a5b4b79e407d3e64b0071f5cdffce100d9c81ba2350bb8de1e64757b4d7d03e1 (root) VirtualMap state / plug-sure-pen-decide | |||||||||
| node2 | 3m 22.026s | 2025-10-30 11:21:01.493 | 4579 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 3m 22.026s | 2025-10-30 11:21:01.493 | 4580 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 365 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node2 | 3m 22.026s | 2025-10-30 11:21:01.493 | 4581 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 3m 22.027s | 2025-10-30 11:21:01.494 | 4569 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 3m 22.027s | 2025-10-30 11:21:01.494 | 4570 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 365 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node1 | 3m 22.027s | 2025-10-30 11:21:01.494 | 4571 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 3m 22.036s | 2025-10-30 11:21:01.503 | 4582 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 3m 22.037s | 2025-10-30 11:21:01.504 | 4580 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node1 | 3m 22.037s | 2025-10-30 11:21:01.504 | 4572 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 3m 22.037s | 2025-10-30 11:21:01.504 | 4573 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 392 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/392 {"round":392,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/392/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 3m 22.037s | 2025-10-30 11:21:01.504 | 4583 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 392 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/392 {"round":392,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/392/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 3m 22.039s | 2025-10-30 11:21:01.506 | 4581 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 392 Timestamp: 2025-10-30T11:21:00.342545Z Next consensus number: 14489 Legacy running event hash: 8a227305f9ab683a9c6c4cff35746607e151211e96d58b92a753a2b11ab8766bee326820369f24410c79c5007e8ba18a Legacy running event mnemonic: rookie-gaze-skirt-also Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 25470378 Root hash: d2ca9930eaedce3a23aad4195b04ec75a5b4b79e407d3e64b0071f5cdffce100d9c81ba2350bb8de1e64757b4d7d03e1 (root) VirtualMap state / plug-sure-pen-decide | |||||||||
| node0 | 3m 22.046s | 2025-10-30 11:21:01.513 | 4582 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 3m 22.046s | 2025-10-30 11:21:01.513 | 4583 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 365 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node0 | 3m 22.046s | 2025-10-30 11:21:01.513 | 4584 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 3m 22.056s | 2025-10-30 11:21:01.523 | 4585 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 3m 22.056s | 2025-10-30 11:21:01.523 | 4586 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 392 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/392 {"round":392,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/392/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 3m 22.099s | 2025-10-30 11:21:01.566 | 4606 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/29 for round 392 | |
| node3 | 3m 22.101s | 2025-10-30 11:21:01.568 | 4607 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 392 Timestamp: 2025-10-30T11:21:00.342545Z Next consensus number: 14489 Legacy running event hash: 8a227305f9ab683a9c6c4cff35746607e151211e96d58b92a753a2b11ab8766bee326820369f24410c79c5007e8ba18a Legacy running event mnemonic: rookie-gaze-skirt-also Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 25470378 Root hash: d2ca9930eaedce3a23aad4195b04ec75a5b4b79e407d3e64b0071f5cdffce100d9c81ba2350bb8de1e64757b4d7d03e1 (root) VirtualMap state / plug-sure-pen-decide | |||||||||
| node3 | 3m 22.107s | 2025-10-30 11:21:01.574 | 4608 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 3m 22.107s | 2025-10-30 11:21:01.574 | 4609 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 365 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 3m 22.108s | 2025-10-30 11:21:01.575 | 4610 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 3m 22.118s | 2025-10-30 11:21:01.585 | 4611 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 3m 22.118s | 2025-10-30 11:21:01.585 | 4612 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 392 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/392 {"round":392,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/392/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
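Note that all four nodes report the identical round-392 root hash and mnemonic (`d2ca…03e1`, "plug-sure-pen-decide") in their "Information for state written to disk" blocks above, which is the invariant this consistency run exercises. A small sketch of that check, with the map filled in by hand from the log blocks (a real check would scrape each node's log or load each saved state):

```java
import java.util.Map;

/**
 * Sketch of the invariant the ConsistencyTestingTool run is verifying: every
 * node must produce the same state root hash for the same round. Values are
 * copied from the round-392 log blocks above.
 */
public final class RootHashConsistencyCheck {

    public static void main(final String[] args) {
        final long round = 392L;
        final String hash =
                "d2ca9930eaedce3a23aad4195b04ec75a5b4b79e407d3e64b0071f5cdffce100d9c81ba2350bb8de1e64757b4d7d03e1";
        final Map<String, String> rootHashByNode = Map.of(
                "node0", hash,
                "node1", hash,
                "node2", hash,
                "node3", hash);

        // An ISS (inconsistent signed state) would show up as more than one distinct hash.
        final long distinctHashes = rootHashByNode.values().stream().distinct().count();
        if (distinctHashes == 1) {
            System.out.println("round " + round + ": all nodes agree on the state root hash");
        } else {
            System.err.println("round " + round + ": ISS suspected, " + distinctHashes + " distinct root hashes");
        }
    }
}
```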
| node2 | 4m 21.739s | 2025-10-30 11:22:01.206 | 6238 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 530 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 4m 21.745s | 2025-10-30 11:22:01.212 | 6162 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 530 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 4m 21.768s | 2025-10-30 11:22:01.235 | 6115 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 530 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 4m 21.881s | 2025-10-30 11:22:01.348 | 6149 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 530 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 4m 21.895s | 2025-10-30 11:22:01.362 | 6152 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 530 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/530 | |
| node3 | 4m 21.895s | 2025-10-30 11:22:01.362 | 6153 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node0 | 4m 21.948s | 2025-10-30 11:22:01.415 | 6118 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 530 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/530 | |
| node0 | 4m 21.949s | 2025-10-30 11:22:01.416 | 6119 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node2 | 4m 21.968s | 2025-10-30 11:22:01.435 | 6241 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 530 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/530 | |
| node2 | 4m 21.969s | 2025-10-30 11:22:01.436 | 6242 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node3 | 4m 21.981s | 2025-10-30 11:22:01.448 | 6192 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node3 | 4m 21.983s | 2025-10-30 11:22:01.450 | 6193 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 530 Timestamp: 2025-10-30T11:22:00.331816866Z Next consensus number: 17763 Legacy running event hash: 3101abb8a09f8ea5cd829bc6a5bcbd0fd7e1262386168e0258b676412886490f67cdfec7d2e24da9845669edd651f03f Legacy running event mnemonic: cage-math-umbrella-clock Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1851597565 Root hash: 08df366c5e72082de0b05a42c3d9d8d5cc154d95754e51bf28496554bcb7b4d03ed9cef9e5f62b114673cbd6434ec8fc (root) VirtualMap state / brief-snow-soft-aisle | |||||||||
| node3 | 4m 21.992s | 2025-10-30 11:22:01.459 | 6194 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces Last file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 4m 21.992s | 2025-10-30 11:22:01.459 | 6195 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 503 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node3 | 4m 21.993s | 2025-10-30 11:22:01.460 | 6196 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 4m 21.994s | 2025-10-30 11:22:01.461 | 6197 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 4m 21.994s | 2025-10-30 11:22:01.461 | 6198 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 530 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/530 {"round":530,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/530/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 4m 21.996s | 2025-10-30 11:22:01.463 | 6199 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/1 | |
| node1 | 4m 22.007s | 2025-10-30 11:22:01.474 | 6165 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 530 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/530 | |
| node1 | 4m 22.008s | 2025-10-30 11:22:01.475 | 6166 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node0 | 4m 22.025s | 2025-10-30 11:22:01.492 | 6158 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node0 | 4m 22.027s | 2025-10-30 11:22:01.494 | 6159 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 530 Timestamp: 2025-10-30T11:22:00.331816866Z Next consensus number: 17763 Legacy running event hash: 3101abb8a09f8ea5cd829bc6a5bcbd0fd7e1262386168e0258b676412886490f67cdfec7d2e24da9845669edd651f03f Legacy running event mnemonic: cage-math-umbrella-clock Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1851597565 Root hash: 08df366c5e72082de0b05a42c3d9d8d5cc154d95754e51bf28496554bcb7b4d03ed9cef9e5f62b114673cbd6434ec8fc (root) VirtualMap state / brief-snow-soft-aisle | |||||||||
| node0 | 4m 22.034s | 2025-10-30 11:22:01.501 | 6160 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 4m 22.034s | 2025-10-30 11:22:01.501 | 6161 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 503 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 4m 22.034s | 2025-10-30 11:22:01.501 | 6162 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 4m 22.035s | 2025-10-30 11:22:01.502 | 6163 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 4m 22.036s | 2025-10-30 11:22:01.503 | 6164 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 530 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/530 {"round":530,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/530/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 4m 22.037s | 2025-10-30 11:22:01.504 | 6165 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/1 | |
| node2 | 4m 22.061s | 2025-10-30 11:22:01.528 | 6281 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node2 | 4m 22.063s | 2025-10-30 11:22:01.530 | 6282 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 530 Timestamp: 2025-10-30T11:22:00.331816866Z Next consensus number: 17763 Legacy running event hash: 3101abb8a09f8ea5cd829bc6a5bcbd0fd7e1262386168e0258b676412886490f67cdfec7d2e24da9845669edd651f03f Legacy running event mnemonic: cage-math-umbrella-clock Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1851597565 Root hash: 08df366c5e72082de0b05a42c3d9d8d5cc154d95754e51bf28496554bcb7b4d03ed9cef9e5f62b114673cbd6434ec8fc (root) VirtualMap state / brief-snow-soft-aisle | |||||||||
| node2 | 4m 22.070s | 2025-10-30 11:22:01.537 | 6283 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 4m 22.070s | 2025-10-30 11:22:01.537 | 6284 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 503 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 4m 22.070s | 2025-10-30 11:22:01.537 | 6285 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 4m 22.071s | 2025-10-30 11:22:01.538 | 6286 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 4m 22.071s | 2025-10-30 11:22:01.538 | 6287 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 530 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/530 {"round":530,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/530/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 4m 22.073s | 2025-10-30 11:22:01.540 | 6288 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/1 | |
| node1 | 4m 22.084s | 2025-10-30 11:22:01.551 | 6197 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/36 for round 530 | |
| node1 | 4m 22.085s | 2025-10-30 11:22:01.552 | 6198 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 530 Timestamp: 2025-10-30T11:22:00.331816866Z Next consensus number: 17763 Legacy running event hash: 3101abb8a09f8ea5cd829bc6a5bcbd0fd7e1262386168e0258b676412886490f67cdfec7d2e24da9845669edd651f03f Legacy running event mnemonic: cage-math-umbrella-clock Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1851597565 Root hash: 08df366c5e72082de0b05a42c3d9d8d5cc154d95754e51bf28496554bcb7b4d03ed9cef9e5f62b114673cbd6434ec8fc (root) VirtualMap state / brief-snow-soft-aisle | |||||||||
| node1 | 4m 22.092s | 2025-10-30 11:22:01.559 | 6199 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 4m 22.092s | 2025-10-30 11:22:01.559 | 6200 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 503 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 4m 22.092s | 2025-10-30 11:22:01.559 | 6201 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 4m 22.093s | 2025-10-30 11:22:01.560 | 6202 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 4m 22.093s | 2025-10-30 11:22:01.560 | 6203 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 530 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/530 {"round":530,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/530/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 4m 22.095s | 2025-10-30 11:22:01.562 | 6204 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/1 | |
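The preconsensus event stream (PCES) files referenced in the round-530 entries above use a self-describing name, e.g. `2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces`: a creation timestamp with `:` replaced by `+` so it is a legal file name, a sequence number, the minimum and maximum rounds the file may cover, and an origin round; the "Lower bound" lines above are compared against that round span to decide which files to copy next to the snapshot. A hedged sketch that parses those fields back out (the field meanings are inferred from the names and the copy criteria in the log, not taken from the platform's source):

```java
import java.time.Instant;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Sketch: pull the fields back out of a PCES file name as seen in the log.
 * The interpretation of seq/minr/maxr/orgn is inferred from the file names
 * and the "Lower bound" lines above.
 */
public final class PcesFileNameSketch {

    // <timestamp with ':' replaced by '+'>_seq<sequence>_minr<minRound>_maxr<maxRound>_orgn<originRound>.pces
    private static final Pattern NAME = Pattern.compile(
            "(?<ts>.+Z)_seq(?<seq>\\d+)_minr(?<minr>\\d+)_maxr(?<maxr>\\d+)_orgn(?<orgn>\\d+)\\.pces");

    record PcesFile(Instant createdAt, long sequence, long minRound, long maxRound, long origin) {}

    static PcesFile parse(final String fileName) {
        final Matcher m = NAME.matcher(fileName);
        if (!m.matches()) {
            throw new IllegalArgumentException("not a PCES file name: " + fileName);
        }
        // Undo the ':' -> '+' substitution so Instant.parse() accepts the timestamp.
        final Instant createdAt = Instant.parse(m.group("ts").replace('+', ':'));
        return new PcesFile(
                createdAt,
                Long.parseLong(m.group("seq")),
                Long.parseLong(m.group("minr")),
                Long.parseLong(m.group("maxr")),
                Long.parseLong(m.group("orgn")));
    }

    public static void main(final String[] args) {
        // File name taken from node1's round-530 snapshot above.
        final PcesFile f = parse("2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces");
        System.out.println(f); // rounds 474..5474, sequence 1, origin 0
    }
}
```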
| node2 | 5m 21.529s | 2025-10-30 11:23:00.996 | 7737 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 668 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 5m 21.575s | 2025-10-30 11:23:01.042 | 7667 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 668 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 5m 21.587s | 2025-10-30 11:23:01.054 | 7612 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 668 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 5m 21.724s | 2025-10-30 11:23:01.191 | 7644 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 668 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 5m 21.727s | 2025-10-30 11:23:01.194 | 7631 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 668 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/668 | |
| node0 | 5m 21.727s | 2025-10-30 11:23:01.194 | 7632 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node3 | 5m 21.729s | 2025-10-30 11:23:01.196 | 7647 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 668 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/668 | |
| node3 | 5m 21.730s | 2025-10-30 11:23:01.197 | 7648 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node2 | 5m 21.753s | 2025-10-30 11:23:01.220 | 7740 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 668 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/668 | |
| node2 | 5m 21.754s | 2025-10-30 11:23:01.221 | 7741 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node1 | 5m 21.797s | 2025-10-30 11:23:01.264 | 7686 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 668 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/668 | |
| node1 | 5m 21.798s | 2025-10-30 11:23:01.265 | 7687 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node0 | 5m 21.806s | 2025-10-30 11:23:01.273 | 7671 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node0 | 5m 21.808s | 2025-10-30 11:23:01.275 | 7672 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 668 Timestamp: 2025-10-30T11:23:00.165862Z Next consensus number: 21070 Legacy running event hash: 6b374839fcc43d266b7c9b721d15048f5132a453459ae22233e7f408fa359433d1245e9879d87c8fff1d2593c23f9ec5 Legacy running event mnemonic: group-already-guide-pill Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -127198768 Root hash: 726db96fc787efb54daed5ea0b14d506ede1031545074c0f56aa183f656057791296f3a89e6828060bc3b57284cfb358 (root) VirtualMap state / ethics-split-august-annual | |||||||||
| node3 | 5m 21.813s | 2025-10-30 11:23:01.280 | 7687 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node0 | 5m 21.815s | 2025-10-30 11:23:01.282 | 7673 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 5m 21.815s | 2025-10-30 11:23:01.282 | 7674 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 640 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 5m 21.815s | 2025-10-30 11:23:01.282 | 7675 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 5m 21.815s | 2025-10-30 11:23:01.282 | 7688 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 668 Timestamp: 2025-10-30T11:23:00.165862Z Next consensus number: 21070 Legacy running event hash: 6b374839fcc43d266b7c9b721d15048f5132a453459ae22233e7f408fa359433d1245e9879d87c8fff1d2593c23f9ec5 Legacy running event mnemonic: group-already-guide-pill Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -127198768 Root hash: 726db96fc787efb54daed5ea0b14d506ede1031545074c0f56aa183f656057791296f3a89e6828060bc3b57284cfb358 (root) VirtualMap state / ethics-split-august-annual | |||||||||
| node0 | 5m 21.818s | 2025-10-30 11:23:01.285 | 7676 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 5m 21.819s | 2025-10-30 11:23:01.286 | 7677 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 668 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/668 {"round":668,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/668/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 5m 21.820s | 2025-10-30 11:23:01.287 | 7678 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/11 | |
| node3 | 5m 21.822s | 2025-10-30 11:23:01.289 | 7689 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces Last file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 5m 21.822s | 2025-10-30 11:23:01.289 | 7690 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 640 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node3 | 5m 21.822s | 2025-10-30 11:23:01.289 | 7691 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 5m 21.825s | 2025-10-30 11:23:01.292 | 7692 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 5m 21.826s | 2025-10-30 11:23:01.293 | 7693 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 668 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/668 {"round":668,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/668/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 5m 21.827s | 2025-10-30 11:23:01.294 | 7694 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/11 | |
| node2 | 5m 21.830s | 2025-10-30 11:23:01.297 | 7772 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node2 | 5m 21.832s | 2025-10-30 11:23:01.299 | 7773 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 668 Timestamp: 2025-10-30T11:23:00.165862Z Next consensus number: 21070 Legacy running event hash: 6b374839fcc43d266b7c9b721d15048f5132a453459ae22233e7f408fa359433d1245e9879d87c8fff1d2593c23f9ec5 Legacy running event mnemonic: group-already-guide-pill Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -127198768 Root hash: 726db96fc787efb54daed5ea0b14d506ede1031545074c0f56aa183f656057791296f3a89e6828060bc3b57284cfb358 (root) VirtualMap state / ethics-split-august-annual | |||||||||
| node2 | 5m 21.839s | 2025-10-30 11:23:01.306 | 7774 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 5m 21.839s | 2025-10-30 11:23:01.306 | 7775 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 640 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 5m 21.839s | 2025-10-30 11:23:01.306 | 7776 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 5m 21.842s | 2025-10-30 11:23:01.309 | 7777 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 5m 21.842s | 2025-10-30 11:23:01.309 | 7778 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 668 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/668 {"round":668,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/668/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 5m 21.844s | 2025-10-30 11:23:01.311 | 7787 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/11 | |
| node1 | 5m 21.865s | 2025-10-30 11:23:01.332 | 7718 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/43 for round 668 | |
| node1 | 5m 21.867s | 2025-10-30 11:23:01.334 | 7719 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 668 Timestamp: 2025-10-30T11:23:00.165862Z Next consensus number: 21070 Legacy running event hash: 6b374839fcc43d266b7c9b721d15048f5132a453459ae22233e7f408fa359433d1245e9879d87c8fff1d2593c23f9ec5 Legacy running event mnemonic: group-already-guide-pill Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -127198768 Root hash: 726db96fc787efb54daed5ea0b14d506ede1031545074c0f56aa183f656057791296f3a89e6828060bc3b57284cfb358 (root) VirtualMap state / ethics-split-august-annual | |||||||||
| node1 | 5m 21.874s | 2025-10-30 11:23:01.341 | 7720 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 5m 21.874s | 2025-10-30 11:23:01.341 | 7721 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 640 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 5m 21.874s | 2025-10-30 11:23:01.341 | 7722 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 5m 21.877s | 2025-10-30 11:23:01.344 | 7723 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 5m 21.877s | 2025-10-30 11:23:01.344 | 7724 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 668 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/668 {"round":668,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/668/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 5m 21.879s | 2025-10-30 11:23:01.346 | 7725 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/11 | |
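Each "Finished writing state ..." line above ends with a machine-readable trailer: a JSON object followed by the payload class in brackets (`[com.swirlds.logging.legacy.payload.StateSavedToDiskPayload]`). A minimal, dependency-free sketch that pulls the interesting fields out of such a line with a regex; a real consumer would hand the JSON to a proper parser instead:

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Sketch: extract the StateSavedToDiskPayload trailer from a
 * "Finished writing state ..." log line. The regex assumes the exact field
 * order seen in the log lines above.
 */
public final class StateSavedPayloadSketch {

    private static final Pattern PAYLOAD = Pattern.compile(
            "\\{\"round\":(?<round>\\d+),\"freezeState\":(?<freeze>true|false),"
                    + "\"reason\":\"(?<reason>[^\"]+)\",\"directory\":\"(?<dir>[^\"]+)\"\\}");

    public static void main(final String[] args) {
        // Line content copied (shortened) from node1's round-668 entry above.
        final String line = "Finished writing state for round 668 to disk. Reason: PERIODIC_SNAPSHOT, "
                + "directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/"
                + "com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/668 "
                + "{\"round\":668,\"freezeState\":false,\"reason\":\"PERIODIC_SNAPSHOT\","
                + "\"directory\":\"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/"
                + "com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/668/\"} "
                + "[com.swirlds.logging.legacy.payload.StateSavedToDiskPayload]";

        final Matcher m = PAYLOAD.matcher(line);
        if (m.find()) {
            System.out.println("round     = " + Long.parseLong(m.group("round")));
            System.out.println("freeze    = " + Boolean.parseBoolean(m.group("freeze")));
            System.out.println("reason    = " + m.group("reason"));
            System.out.println("directory = " + m.group("dir"));
        }
    }
}
```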
| node4 | 5m 58.249s | 2025-10-30 11:23:37.716 | 1 | INFO | STARTUP | <main> | StaticPlatformBuilder: | ||
| ////////////////////// // Node is Starting // ////////////////////// | |||||||||
| node4 | 5m 58.346s | 2025-10-30 11:23:37.813 | 2 | DEBUG | STARTUP | <main> | StaticPlatformBuilder: | main() started {} [com.swirlds.logging.legacy.payload.NodeStartPayload] | |
| node4 | 5m 58.363s | 2025-10-30 11:23:37.830 | 3 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node4 | 5m 58.483s | 2025-10-30 11:23:37.950 | 4 | INFO | STARTUP | <main> | Browser: | The following nodes [4] are set to run locally | |
| node4 | 5m 58.519s | 2025-10-30 11:23:37.986 | 5 | DEBUG | STARTUP | <main> | BootstrapUtils: | Scanning the classpath for RuntimeConstructable classes | |
| node4 | 6.002m | 2025-10-30 11:23:39.614 | 6 | DEBUG | STARTUP | <main> | BootstrapUtils: | Done with registerConstructables, time taken 1627ms | |
| node4 | 6.003m | 2025-10-30 11:23:39.623 | 7 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | constructor called in Main. | |
| node4 | 6.003m | 2025-10-30 11:23:39.626 | 8 | WARN | STARTUP | <main> | PlatformConfigUtils: | Configuration property 'reconnect.asyncOutputStreamFlushMilliseconds' was renamed to 'reconnect.asyncOutputStreamFlush'. This build is currently backwards compatible with the old name, but this may not be true in a future release, so it is important to switch to the new name. | |
| node4 | 6.003m | 2025-10-30 11:23:39.667 | 9 | INFO | STARTUP | <main> | PrometheusEndpoint: | PrometheusEndpoint: Starting server listening on port: 9999 | |
| node4 | 6.004m | 2025-10-30 11:23:39.732 | 10 | WARN | STARTUP | <main> | CryptoStatic: | There are no keys on disk; ad hoc keys will be generated, but this is incompatible with DAB. | |
| node4 | 6.004m | 2025-10-30 11:23:39.733 | 11 | DEBUG | STARTUP | <main> | CryptoStatic: | Started generating keys | |
| node4 | 6m 2.351s | 2025-10-30 11:23:41.818 | 12 | DEBUG | STARTUP | <main> | CryptoStatic: | Done generating keys | |
| node4 | 6m 2.459s | 2025-10-30 11:23:41.926 | 15 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 6m 2.467s | 2025-10-30 11:23:41.934 | 16 | INFO | STARTUP | <main> | StartupStateUtils: | The following saved states were found on disk: | |
| - /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/262/SignedState.swh - /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/139/SignedState.swh - /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/11/SignedState.swh - /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/1/SignedState.swh | |||||||||
| node4 | 6m 2.468s | 2025-10-30 11:23:41.935 | 17 | INFO | STARTUP | <main> | StartupStateUtils: | Loading latest state from disk. | |
| node4 | 6m 2.468s | 2025-10-30 11:23:41.935 | 18 | INFO | STARTUP | <main> | StartupStateUtils: | Loading signed state from disk: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/262/SignedState.swh | |
| node4 | 6m 2.479s | 2025-10-30 11:23:41.946 | 19 | INFO | STATE_TO_DISK | <main> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp | |
| node4 | 6m 2.616s | 2025-10-30 11:23:42.083 | 29 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | New State Constructed. | |
| node4 | 6m 3.458s | 2025-10-30 11:23:42.925 | 31 | INFO | STARTUP | <main> | StartupStateUtils: | Loaded state's hash is the same as when it was saved. | |
| node4 | 6m 3.464s | 2025-10-30 11:23:42.931 | 32 | INFO | STARTUP | <main> | StartupStateUtils: | Platform has loaded a saved state {"round":262,"consensusTimestamp":"2025-10-30T11:20:00.075054579Z"} [com.swirlds.logging.legacy.payload.SavedStateLoadedPayload] | |
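On restart, node4 finds four saved states (rounds 262, 139, 11 and 1) under `.../ConsistencyTestingToolMain/4/123/<round>/SignedState.swh` and loads the one with the highest round. A sketch of that selection step, assuming only the directory layout visible in the paths above (application main class / node id / what appears to be a swirld id of 123 / round number):

```java
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;

/**
 * Sketch: locate the newest SignedState.swh under the saved-state layout seen
 * in the log: <savedDir>/<mainClass>/<nodeId>/<swirldId>/<round>/SignedState.swh.
 * The layout is inferred from the paths above, not from platform documentation.
 */
public final class LatestSavedStateSketch {

    static Optional<Path> latestSignedState(final Path savedDir, final String mainClass,
            final long nodeId, final long swirldId) throws IOException {
        final Path base = savedDir.resolve(mainClass)
                .resolve(Long.toString(nodeId))
                .resolve(Long.toString(swirldId));
        if (!Files.isDirectory(base)) {
            return Optional.empty();
        }
        Path best = null;
        long bestRound = Long.MIN_VALUE;
        try (DirectoryStream<Path> rounds = Files.newDirectoryStream(base)) {
            for (final Path roundDir : rounds) {
                final long round;
                try {
                    round = Long.parseLong(roundDir.getFileName().toString());
                } catch (final NumberFormatException e) {
                    continue; // not a round directory
                }
                final Path state = roundDir.resolve("SignedState.swh");
                if (round > bestRound && Files.exists(state)) {
                    bestRound = round;
                    best = state;
                }
            }
        }
        return Optional.ofNullable(best);
    }

    public static void main(final String[] args) throws IOException {
        // Values taken from node4's startup lines above.
        latestSignedState(
                Path.of("/opt/hgcapp/services-hedera/HapiApp2.0/data/saved"),
                "com.swirlds.demo.consistency.ConsistencyTestingToolMain", 4, 123)
            .ifPresentOrElse(
                p -> System.out.println("loading " + p),
                () -> System.out.println("no saved state found, starting from genesis"));
    }
}
```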
| node4 | 6m 3.471s | 2025-10-30 11:23:42.938 | 35 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 6m 3.472s | 2025-10-30 11:23:42.939 | 38 | INFO | STARTUP | <main> | BootstrapUtils: | Not upgrading software, current software is version SemanticVersion[major=1, minor=0, patch=0, pre=, build=]. | |
| node4 | 6m 3.480s | 2025-10-30 11:23:42.947 | 39 | INFO | STARTUP | <main> | AddressBookInitializer: | Using the loaded state's address book and weight values. | |
| node4 | 6m 3.491s | 2025-10-30 11:23:42.958 | 40 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 6m 3.495s | 2025-10-30 11:23:42.962 | 41 | INFO | STARTUP | <main> | ConsistencyTestingToolMain: | returning software version SemanticVersion[major=1, minor=0, patch=0, pre=, build=] | |
| node4 | 6m 4.627s | 2025-10-30 11:23:44.094 | 42 | INFO | STARTUP | <main> | OSHealthChecker: | ||
| PASSED - Clock Source Speed Check Report[callsPerSec=26221729] PASSED - Entropy Check Report[success=true, entropySource=Strong Instance, elapsedNanos=282060, randomLong=-4081712532408026839, exception=null] PASSED - Entropy Check Report[success=true, entropySource=NativePRNGBlocking:SUN, elapsedNanos=14950, randomLong=-1252142536780506820, exception=null] PASSED - File System Check Report[code=SUCCESS, readNanos=1332529, data=35, exception=null] OS Health Check Report - Complete (took 1031 ms) | |||||||||
| node4 | 6m 4.665s | 2025-10-30 11:23:44.132 | 43 | DEBUG | STARTUP | <main> | BootstrapUtils: | jvmPauseDetectorThread started | |
| node4 | 6m 4.800s | 2025-10-30 11:23:44.267 | 44 | INFO | STARTUP | <main> | PcesUtilities: | Span compaction completed for data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr501_orgn0.pces, new upper bound is 378 | |
| node4 | 6m 4.804s | 2025-10-30 11:23:44.271 | 45 | INFO | STARTUP | <main> | StandardScratchpad: | Scratchpad platform.iss contents: | |
| LAST_ISS_ROUND null | |||||||||
| node4 | 6m 4.806s | 2025-10-30 11:23:44.273 | 46 | INFO | STARTUP | <main> | PlatformBuilder: | Default platform pool parallelism: 8 | |
| node4 | 6m 4.909s | 2025-10-30 11:23:44.376 | 47 | INFO | STARTUP | <main> | SwirldsPlatform: | Starting with roster history: | |
| RosterHistory[ currentRosterRound: 0 ][ no previous roster set ] Current Roster: { "rosterEntries": [{ "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAK05TS8KZeb1MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTEwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTEwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDBoP9dI3K1PRLRK7h90D9eNCfgzuHTyJi70yDEs90XJXlE6jmgf1NE2av83VAhQHLxu8Ehc/55M9Ayx9IQc0zJLSS+IrRM9QwqoG8ZvNdRgNw+je3V/8rAK/mHId+cPnnyDplCyskyi5kWCv6kTULIewFH8/KVZwhe0/hB2+N6ujWixURrxjjGLHA6b2gPoGAb/nxiVOn+L0cWcOzcyiYShxagj0FBWV7AxKx65Ynzfe7eF0gOzBUA+IM10OM5KXJejk53Xz5KpEyGe8htO/bXFlpLdm3UzrYiIhY0oKPYKECAC1s+VAZA6i+MV0nDpqDgxHRRXD8O2arauPhEI6iVT9f05AtzElrs7U95HbpQUuP1sxkaQw+bLdMOQHHMVCgMgw2g0eDdVDAMJD7wjZ+Bs6kDc/EJELb0l1uy2GEnOZMiHkK4K1r4IyZ/ed6QpyIRKfBCNyT5IIpMoVpzRYxVXgjgFdudd8iErKyvSXHThU6nu92c+vSd+FLBFHPpb6ECAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdga5NYtV48uDCd4vIsmpGWpKuUHtDVDlCvzHc2ij8DxAR6OFp+hIRNEBXkzg1KS5qP8Wba5ptmGoV4f89HemP+AL3Azde+HjpYRtffdfTdQwmMbw7xJg2lKkEo11gDo5+zPZnVbfb3FsZ+IXKji0QshQBfg+ddTkFG3TJG1ttq3ZDw94RxFQivVnkj1p+Ogel/DuBNRWQobFVe5VrmJqbuwwN8AdrPae1dMrkZatF91On5+cpVLGfk96fYUhDohDt6KKQ6DdhvFk5rhd0vsHGMQq2gAW2+Or6ZVsKkHKx8CPINpJVKAdpE0tItI+loMO02jf9oRI/8cThWP1vNAeWnr0D6m275EZf/4qem/DdJ0FJIVou3P7tsq7eSdueDnj5RmcbW/vOBtvlXpD3SqsVRn6sltZ0sk24p+6ZMzopevCZEMf/nL3OzGvSadisXb39H9DgwkNLlefju1QLgHWf0TGfeNHluDgVDhU8+/1/KUGtr2SnZ5EVO1l59FWHALj", "gossipEndpoint": [{ "ipAddressV4": "I95wBg==", "port": 30124 }, { "ipAddressV4": "CoAAAw==", "port": 30124 }] }, { "nodeId": "1", "weight": "12500000000", "gossipCaCertificate": "MIIDpjCCAg6gAwIBAgIIHWg7e2Q/smQwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlMjAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlMjCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKr5WsBepS3+y/0/yfBjzMWje7zianEz7sszrNWV3cGu2KUlR7v2+9wp/EtX1+BdcGlTTojgFs5nEBN4lM76Cp6JjFH461yN8GSkIkpe8GZnb1w4KEjZj5UYMbq+qOUI6QmwmgLeO8RHAsS6lCP1AyGFalb2ZVJ09DcYDxCRXeFj4BqvNbtD5r5DTCtpVT4ax3eb3pzNSGsjQUG9zhyp/WcsAmwmzKdMl72tk6qF8tlAWXyzwiCujWHS0Kln0C5pyEjeFNsG299toC4pgT8juxijgseTeIFRnNHmGSeSmXpAkEELlwLKR8HOnqeiS5UXNqdbxNemx/EpJSc5rTB6kzLX24dIuRsgyIIFWx73goOzmaHUolN4xmenifoMYlSNNM07WrsvmjRC5OLc/uGhdWqhZGBCH6AJB8Cmw84QLXVdHE6LiueP1oMd7g++N4X880wJkuh0ebfV3i7etUIn0jLlM50AkRucG9kwZDJ/M4LY7FT2F85R1/o2FaB/537ARQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQB5lTkqYw0hEW+BJTFsQ8jEHfIDNRJ0kNbVuibfP+u7kzlJy15lCEi+Qw6E3d8hA1QBX3xJMxNBlrtYPrdG26hh/tOwo5Np/OfxQC5jo0Q7n7hu7aLxZRUB/q7AfdDbOun4Za6rJhT3+EsFocyARWp8bYSk3YILBMkP+2VYDRkgQidzKgKtO5yv21Y9sEgziSprc+dQb/tqn5aQZLWavFwCLwnB3t4r4qwLHkkH00Jw51uOvLeM49/t333V5Caa7wmWzMcE+KSWW0QWFRxeJrodSyjPdmDi4D8lKN5WJHSAU5L2yWIODUyWD/cvsAapTv7xXk9ja/Ssb9DpMQnM1xh0hYaESajNeL1QbGuZgPxAwrw981h7kprR2P2iMGRVGA6u4ezxmhW3s7D+yJ3+Yxs/x2J/sw65Z16mRYXRWYWHQmhgaVQjIviiAkVB6CWZo1kHl/eYaVedQzKlrTpbr3JtmwGwhYEOnrkzsC63h8/AG9gRtIAIGWGqTPWbn2pEm8M=", "gossipEndpoint": [{ "ipAddressV4": "I+g9lA==", "port": 30125 }, { "ipAddressV4": "CoAAIw==", "port": 30125 }] }, { "nodeId": "2", "weight": "12500000000", "gossipCaCertificate": 
"MIIDpzCCAg+gAwIBAgIJAJg3GRFp5bT9MA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTMwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTMwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQCl5ut2dCleDmgEneRYpAKa9Pe2qnXzgF+BEIuTfizG2OcPQi/ltv+6HxSrJXtuWNaiX/G4iP7iBzWj2ysaAYwfYj0ezTSMLRqM9hXzVgLtW0LJEF6a8vUXPsJt4GEJkUKiYCCO1MP1NLd3y/3SVJrFhwJSPqKYm2pQNg84WfPDWSkzSneOIO4Z0uWDXgs+vzSNyChWOxVieFQhLjcELtyj6narmLox+Jdo/SxUzPuktuFB3ebNgUqWPkjljgZpl00BTmbRIVHgHfDVulo2PBpXd0VplIDgdPr5zMKdTrKCuDKey8Mft72RkPKMe9LZVZ/21+rXVEh+olvvUCySsP2RkWPUJJD90c8wKo01rZsjAOXscJKQcBYlam5XXO4ZBRYzEdxuivbkPwsOoQ83swCR3alPvwfbg11Va+zXE6sRbUM9LqkYo/M3Hwg8tSIXu8oah6csputanz867dzWwyVJEPzmiXZ6ncVDQO31QlB7RndWCqKTjOQpnpblUMsrE9MCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAnUA8+kz7L+eSOm/iVvUNYF10PKO2nZtxWWL7R1vwK/2Up765PwqxKb0eSEM4bjgvZq1GuGXs9X/Y7dos42yntXvgeUY+/2JzCnw4J5tzxytZ+IKX6DR67NjDzDzVZQfptjLQrb8E7yzml0uxsqrhNPWl57Bmfe66Kg2lD11jImeeEhExlRggFukoiUWVwRNU21Q1jMUWrg2ZwfP+6fFTgRt0WR+X5zkyYPbvI6/yv7reYGjPDuZTOFhbwG8LUTQxdttDswPjnQ606kMyninL+aNelSdV/UIII7lpr/dTvgQAnrlBaGXvdy6brh3wWEwia0FZFZcKEs6M+jZ3MrFxvlTfUIdI3jRq12L10cCDi2VhORg4JmvlM+Tk6kJeSku30ZLAVo3S7GbTdvkuesOxz3UwnF7yfOA1KYOPvhv1oLxGV5z05glsn1OBKnXMdzsKFbAYYHj81bgBni2WLuIpv3oXlai2uc4y9m8LvWAQ+h/ivyog34Ai3Pvr5ZZOFgjy", "gossipEndpoint": [{ "ipAddressV4": "Iir4zA==", "port": 30126 }, { "ipAddressV4": "CoAAHA==", "port": 30126 }] }, { "nodeId": "3", "weight": "12500000000", "gossipCaCertificate": "MIIDpzCCAg+gAwIBAgIJAN7hww13zBZEMA0GCSqGSIb3DQEBDAUAMBIxEDAOBgNVBAMTB3Mtbm9kZTQwIBcNMDAwMTAxMDAwMDAwWhgPMjEwMDAxMDEwMDAwMDBaMBIxEDAOBgNVBAMTB3Mtbm9kZTQwggGiMA0GCSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDK/bVyv0ZUeJZ4cIOImM+wmqtYjCw4jPAC549WQPPV1vG0lzSpgV+nRKqmWBexhLlKN3bsvrfNCUpKSq8meFyCtdppT1dhUOmEZcoNhLZzqxXb2HYYqRPv82tR+tbh+27WFsBOOqYrYTvr72ECD7qDOuw/Xob6KImaw/b/SIAPecMoYy25fkgYkJSETwd8HUpwssYH/JTLBF8eGjjTTMuu14ARQKeH8BXSs+jjV1+3IItXERS8ryUGDjqc5vC8ZW1kDVQbb91IDxRjqZbFyhuasocCqTAcZuiEgE8Wilwp2g1vbAUnHnvKNfiaEAHoEV6vF4lelaWhOnN2U5tnox/ns6PiDqIbOfs0pmXxjAK0vxc6oZM3TwdRtzo6cSb/AYfQdnmQzkra980kHN12r3f7PK2PzGBuVUPT7fLGA4S3vQDYO4rqcgTc/OLobtqLtdBusOFjZscfIfUW4GVWJUI1j+fwvHacxWLmyZwlQ5Q47UtrtjWpFru7CTn5S477lqMCAwEAATANBgkqhkiG9w0BAQwFAAOCAYEAdW6AWDhT0eOJw+0O6MYngmCgkXfFsgBC/B1plaE596hHo58FHxzCNiLFdvfRj37rxujvqsDAkADWUmOzLzLHYMXu302HzDqAMNY6FZJc32y4ZDsIQpaUOAuiNHAHwFXuPRInVpCqztfJMgw4RhOhcCTEsoIJsqoIN1t4M0pEVAv6x3nJwFKZqSNOZrQ7sOW32FjwWS3kHwRsCTtqdk5n2KxU6wr/fggV3QsSPRMYro8sUfwu93mqggtswwWqfeKlsz5WiaR9aqLnb8z1R6HLvA0bcoPWzjgn8RdP+9we4z06iZ5vdBuNpwBjrCKUELWISyAoekLGGxyS8pPqYiSBRNUoaPITSuUjcCBbJ9EFvm72QgCBesbwF71KPabTPbMPhLmf+uAi+zmeu8ZeVvT6DrX9OHSkIvIEQFry9BrqOT3ce6KBHSO1HpXIetj5Wcd3WHXtz9ulBL9ikWC8eh7/+we51ucmLvFzNKznElhT2Dp+czXUVNEUjp3u/66pyRA4", "gossipEndpoint": [{ "ipAddressV4": "IqyjSg==", "port": 30127 }, { "ipAddressV4": "CoAAIA==", "port": 30127 }] }, { "nodeId": "4", "gossipCaCertificate": 
"MIIDpjCCAg6gAwIBAgIId2RXuetTjnAwDQYJKoZIhvcNAQEMBQAwEjEQMA4GA1UEAxMHcy1ub2RlNTAgFw0wMDAxMDEwMDAwMDBaGA8yMTAwMDEwMTAwMDAwMFowEjEQMA4GA1UEAxMHcy1ub2RlNTCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKw8NsPZVKyXW3smNAUcoN/JOyhyJah6Y2gPOCek6hcOaLjg6DvZxLzinpLKwScrgRIIiuU/9hUsy6EiOtCuX3lT4ByKA5XdN1YQAXL1TS00vUf7BR1b+PW58gaJtepZctHvyklNxFeVT/vq2zWQa1sXeTz0OReJXO+8WWO1AjciYyv963zZ6jvk5yhWGukC5NJ2pJTjFh3+2+PivnLevNNnW6bZkdgSl3MThr78nsWlUQykvx+FNIgLlrq+4fCIFzXMeJRRXqo0MJlsxBfQaSu1arqMGE5BQWBEr51EH9UKNP3MSEntr1h1WMeJ3tZXFJ/z1xzKxsVII8HNtjynv1MoEGY4AQDUWI0CRUBv0uAmBUljGJVyqgHIPO6OGznJdkQsmSOSpqdVeNSWEd85Nh0Niorvpat9g5vUJ9zWKPWmgiww+RQiqq8jA2BiD/3n+I8UIMbMShiJUQBmnveAEWLMeV6TIuIBllGQ0a5oB+PQOZh0g+TvsjnH8/c1h1MSHQIDAQABMA0GCSqGSIb3DQEBDAUAA4IBgQBYPcTDNQjigDZ2iY/nA9BhiyNJDHQBecLoADQ+5mxGrGclUcvd002ovbVhoGlcMkVyG/Am9/Y3t6zpO4pMQy7Jecfx8U8+VtKEPFkpfslCmEHT9EpnCGcS5t1KB2KcqnZ57f9uUZLBtMPem1FO6wcT71TXr4/+hGNuLpLmtKkQWARnVURR1/MHhcJ35BH1/x1VIeNc+PBpTE/blszn69Gwqt/HlpNTnYfqS0vhTCfOO07dxn8n904xb1nZ0l0k7pCQdRTTn+dYxmvQ7MlTRK5iOlOKIiRAbD8Fwyfo93cvDj6V56c7Gz+knyjz0iARMsptumqevmfiJ324HzV/12SukJokFDl9G6MjG99ucg3CQaIxa5kRVN1b5D+QMeowfj0lKTchGm/BbuO4zousIE+aWCGfy41CccTP11JW2quwjsVGufb5fvCfDXop5f9i1+95oHd2JyLmHDnJBWEqBHret4Dx8P8GyQhyZB6jkZpVwJy/ruPyrAL3kcKqUZecaOg=", "gossipEndpoint": [{ "ipAddressV4": "IhzSrg==", "port": 30128 }, { "ipAddressV4": "CoAAHQ==", "port": 30128 }] }] } | |||||||||
| node4 | 6m 4.940s | 2025-10-30 11:23:44.407 | 48 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | State initialized with state long -8308913855573846839. | |
| node4 | 6m 4.941s | 2025-10-30 11:23:44.408 | 49 | INFO | STARTUP | <main> | ConsistencyTestingToolState: | State initialized with 262 rounds handled. | |
| node4 | 6m 4.942s | 2025-10-30 11:23:44.409 | 50 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/4/ConsistencyTestLog.csv | |
| node4 | 6m 4.943s | 2025-10-30 11:23:44.410 | 51 | INFO | STARTUP | <main> | TransactionHandlingHistory: | Log file found. Parsing previous history | |
| node4 | 6m 4.992s | 2025-10-30 11:23:44.459 | 52 | INFO | STARTUP | <main> | StateInitializer: | The platform is using the following initial state: | |
| Round: 262 Timestamp: 2025-10-30T11:20:00.075054579Z Next consensus number: 9796 Legacy running event hash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 Legacy running event mnemonic: hour-soup-element-giant Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 59014429 Root hash: 8426f4d659f6d33d246b89ebbc038f302915037497d2f857fea83bc4aba823bd76b181995088df51263d7ca10351e201 (root) VirtualMap state / kitten-grab-disagree-observe | |||||||||
| node4 | 6m 4.998s | 2025-10-30 11:23:44.465 | 54 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Starting the ReconnectController | |
| node4 | 6m 5.211s | 2025-10-30 11:23:44.678 | 55 | INFO | EVENT_STREAM | <main> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: 0372d181bdc32b0deb699da045c13e412ad46166b15955a77366f2050f9cff6a8760d9d5830363b65dbca109b285e9b4 | |
| node4 | 6m 5.221s | 2025-10-30 11:23:44.688 | 56 | INFO | STARTUP | <platformForkJoinThread-3> | Shadowgraph: | Shadowgraph starting from expiration threshold 235 | |
| node4 | 6m 5.228s | 2025-10-30 11:23:44.695 | 58 | INFO | STARTUP | <<start-node-4>> | ConsistencyTestingToolMain: | init called in Main for node 4. | |
| node4 | 6m 5.228s | 2025-10-30 11:23:44.695 | 59 | INFO | STARTUP | <<start-node-4>> | SwirldsPlatform: | Starting platform 4 | |
| node4 | 6m 5.230s | 2025-10-30 11:23:44.697 | 60 | INFO | STARTUP | <<platform: recycle-bin-cleanup>> | RecycleBinImpl: | Deleted 0 files from the recycle bin. | |
| node4 | 6m 5.233s | 2025-10-30 11:23:44.700 | 61 | INFO | STARTUP | <<start-node-4>> | CycleFinder: | No cyclical back pressure detected in wiring model. | |
| node4 | 6m 5.234s | 2025-10-30 11:23:44.701 | 62 | INFO | STARTUP | <<start-node-4>> | DirectSchedulerChecks: | No illegal direct scheduler use detected in the wiring model. | |
| node4 | 6m 5.235s | 2025-10-30 11:23:44.702 | 63 | INFO | STARTUP | <<start-node-4>> | InputWireChecks: | All input wires have been bound. | |
| node4 | 6m 5.237s | 2025-10-30 11:23:44.704 | 64 | INFO | STARTUP | <<start-node-4>> | SwirldsPlatform: | replaying preconsensus event stream starting at 235 | |
| node4 | 6m 5.244s | 2025-10-30 11:23:44.711 | 65 | INFO | PLATFORM_STATUS | <platformForkJoinThread-2> | StatusStateMachine: | Platform spent 183.0 ms in STARTING_UP. Now in REPLAYING_EVENTS | |
| node4 | 6m 5.518s | 2025-10-30 11:23:44.985 | 66 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:4 H:15bb09809bfd BR:260), num remaining: 4 | |
| node4 | 6m 5.521s | 2025-10-30 11:23:44.988 | 67 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:3 H:8fd5d11960db BR:260), num remaining: 3 | |
| node4 | 6m 5.521s | 2025-10-30 11:23:44.988 | 68 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:0 H:a0b7f0b9d50c BR:260), num remaining: 2 | |
| node4 | 6m 5.522s | 2025-10-30 11:23:44.989 | 69 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:2 H:e28327b8b484 BR:260), num remaining: 1 | |
| node4 | 6m 5.522s | 2025-10-30 11:23:44.989 | 70 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:1 H:84b3a90520d1 BR:260), num remaining: 0 | |
| node4 | 6m 6.405s | 2025-10-30 11:23:45.872 | 978 | INFO | STARTUP | <<start-node-4>> | PcesReplayer: | Replayed 5,393 preconsensus events with max birth round 378. These events contained 7,500 transactions. 115 rounds reached consensus spanning 53.8 seconds of consensus time. The latest round to reach consensus is round 377. Replay took 1.2 seconds. | |
| node4 | 6m 6.407s | 2025-10-30 11:23:45.874 | 979 | INFO | STARTUP | <<app: appMain 4>> | ConsistencyTestingToolMain: | run called in Main. | |
| node4 | 6m 6.409s | 2025-10-30 11:23:45.876 | 980 | INFO | PLATFORM_STATUS | <platformForkJoinThread-7> | StatusStateMachine: | Platform spent 1.2 s in REPLAYING_EVENTS. Now in OBSERVING | |
| node4 | 6m 7.311s | 2025-10-30 11:23:46.778 | 1129 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Preparing for reconnect, stopping gossip | |
| node4 | 6m 7.311s | 2025-10-30 11:23:46.778 | 1130 | INFO | RECONNECT | <<platform-core: SyncProtocolWith2 4 to 2>> | RpcPeerHandler: | SELF_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] remote ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] | |
| node4 | 6m 7.311s | 2025-10-30 11:23:46.778 | 1131 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | RpcPeerHandler: | SELF_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] remote ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] | |
| node4 | 6m 7.311s | 2025-10-30 11:23:46.778 | 1132 | INFO | RECONNECT | <<platform-core: SyncProtocolWith0 4 to 0>> | RpcPeerHandler: | SELF_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] remote ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] | |
| node4 | 6m 7.311s | 2025-10-30 11:23:46.778 | 1133 | INFO | RECONNECT | <<platform-core: SyncProtocolWith3 4 to 3>> | RpcPeerHandler: | SELF_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] remote ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] | |
| node4 | 6m 7.312s | 2025-10-30 11:23:46.779 | 1134 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Preparing for reconnect, start clearing queues | |
| node4 | 6m 7.312s | 2025-10-30 11:23:46.779 | 1135 | INFO | PLATFORM_STATUS | <platformForkJoinThread-2> | StatusStateMachine: | Platform spent 901.0 ms in OBSERVING. Now in BEHIND | |
| node0 | 6m 7.380s | 2025-10-30 11:23:46.847 | 8821 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 0 to 4>> | RpcPeerHandler: | OTHER_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] remote ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] | |
| node1 | 6m 7.380s | 2025-10-30 11:23:46.847 | 8884 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | RpcPeerHandler: | OTHER_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] remote ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] | |
| node3 | 6m 7.380s | 2025-10-30 11:23:46.847 | 8881 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 3 to 4>> | RpcPeerHandler: | OTHER_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] remote ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] | |
| node2 | 6m 7.381s | 2025-10-30 11:23:46.848 | 9014 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 2 to 4>> | RpcPeerHandler: | OTHER_FALLEN_BEHIND local ev=EventWindow[latestConsensusRound=772,ancientThreshold=745,expiredThreshold=671] remote ev=EventWindow[latestConsensusRound=377,ancientThreshold=350,expiredThreshold=276] | |
| node4 | 6m 7.466s | 2025-10-30 11:23:46.933 | 1136 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Queues have been cleared | |
| node4 | 6m 7.467s | 2025-10-30 11:23:46.934 | 1137 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Waiting for a state to be obtained from a peer | |
| node1 | 6m 7.562s | 2025-10-30 11:23:47.029 | 8896 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | Starting reconnect in the role of the sender {"receiving":false,"nodeId":1,"otherNodeId":4,"round":772} [com.swirlds.logging.legacy.payload.ReconnectStartPayload] | |
| node1 | 6m 7.562s | 2025-10-30 11:23:47.029 | 8897 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | The following state will be sent to the learner: | |
| Round: 772 Timestamp: 2025-10-30T11:23:45.566754510Z Next consensus number: 23582 Legacy running event hash: dca8d321dd24407a06f2b73cf972b0d9a1c7184866803bd585fa0609535b944a4909a69193af5bb6dcdf9087f6116955 Legacy running event mnemonic: help-orbit-fortune-aunt Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -1313990699 Root hash: 57bec768f947693d849bf24b21d3e36b71004503c89cbe4d84a6b29dd4a2ca98eaa99fb89c9411db9b1bed92fb14f945 (root) VirtualMap state / water-basic-strong-chicken | |||||||||
| node1 | 6m 7.563s | 2025-10-30 11:23:47.030 | 8898 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | Sending signatures from nodes 0, 1, 3 (signing weight = 37500000000/50000000000) for state hash 57bec768f947693d849bf24b21d3e36b71004503c89cbe4d84a6b29dd4a2ca98eaa99fb89c9411db9b1bed92fb14f945 | |
| node1 | 6m 7.563s | 2025-10-30 11:23:47.030 | 8899 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | Starting synchronization in the role of the sender. | |
| node4 | 6m 7.633s | 2025-10-30 11:23:47.100 | 1138 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStatePeerProtocol: | Starting reconnect in role of the receiver. {"receiving":true,"nodeId":4,"otherNodeId":1,"round":377} [com.swirlds.logging.legacy.payload.ReconnectStartPayload] | |
| node4 | 6m 7.634s | 2025-10-30 11:23:47.101 | 1139 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStateLearner: | Receiving signed state signatures | |
| node4 | 6m 7.635s | 2025-10-30 11:23:47.102 | 1140 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStateLearner: | Received signatures from nodes 0, 1, 3 | |
| node1 | 6m 7.685s | 2025-10-30 11:23:47.152 | 8915 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | TeachingSynchronizer: | sending tree rooted at com.swirlds.virtualmap.VirtualMap with route [] | |
| node1 | 6m 7.693s | 2025-10-30 11:23:47.160 | 8916 | INFO | RECONNECT | <<work group teaching-synchronizer: async-input-stream #0>> | AsyncInputStream: | com.swirlds.common.merkle.synchronization.streams.AsyncInputStream@6dca36ad start run() | |
| node4 | 6m 7.855s | 2025-10-30 11:23:47.322 | 1167 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | learner calls receiveTree() | |
| node4 | 6m 7.855s | 2025-10-30 11:23:47.322 | 1168 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | synchronizing tree | |
| node4 | 6m 7.856s | 2025-10-30 11:23:47.323 | 1169 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | receiving tree rooted at com.swirlds.virtualmap.VirtualMap with route [] | |
| node4 | 6m 7.866s | 2025-10-30 11:23:47.333 | 1170 | INFO | RECONNECT | <<work group learning-synchronizer: async-input-stream #0>> | AsyncInputStream: | com.swirlds.common.merkle.synchronization.streams.AsyncInputStream@40435271 start run() | |
| node4 | 6m 7.925s | 2025-10-30 11:23:47.392 | 1171 | INFO | RECONNECT | <<work group learning-synchronizer: async-input-stream #0>> | ReconnectNodeRemover: | setPathInformation(): firstLeafPath: 4 -> 4, lastLeafPath: 8 -> 8 | |
| node4 | 6m 7.925s | 2025-10-30 11:23:47.392 | 1172 | INFO | RECONNECT | <<work group learning-synchronizer: async-input-stream #0>> | ReconnectNodeRemover: | setPathInformation(): done | |
| node4 | 6m 8.071s | 2025-10-30 11:23:47.538 | 1173 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | LearnerPushTask: | learner thread finished the learning loop for the current subtree | |
| node4 | 6m 8.071s | 2025-10-30 11:23:47.538 | 1174 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | LearnerPushVirtualTreeView: | call nodeRemover.allNodesReceived() | |
| node4 | 6m 8.072s | 2025-10-30 11:23:47.539 | 1175 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | ReconnectNodeRemover: | allNodesReceived() | |
| node4 | 6m 8.072s | 2025-10-30 11:23:47.539 | 1176 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | ReconnectNodeRemover: | allNodesReceived(): done | |
| node4 | 6m 8.072s | 2025-10-30 11:23:47.539 | 1177 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | LearnerPushVirtualTreeView: | call root.endLearnerReconnect() | |
| node4 | 6m 8.073s | 2025-10-30 11:23:47.540 | 1178 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | VirtualMap: | call reconnectIterator.close() | |
| node4 | 6m 8.073s | 2025-10-30 11:23:47.540 | 1179 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | VirtualMap: | call setHashPrivate() | |
| node4 | 6m 8.095s | 2025-10-30 11:23:47.562 | 1189 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | VirtualMap: | call postInit() | |
| node4 | 6m 8.095s | 2025-10-30 11:23:47.562 | 1191 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | VirtualMap: | endLearnerReconnect() complete | |
| node4 | 6m 8.096s | 2025-10-30 11:23:47.563 | 1192 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | LearnerPushVirtualTreeView: | close() complete | |
| node4 | 6m 8.096s | 2025-10-30 11:23:47.563 | 1193 | INFO | RECONNECT | <<work group learning-synchronizer: learner-task #2>> | LearnerPushTask: | learner thread closed input, output, and view for the current subtree | |
| node4 | 6m 8.097s | 2025-10-30 11:23:47.564 | 1194 | INFO | RECONNECT | <<work group learning-synchronizer: async-input-stream #0>> | AsyncInputStream: | com.swirlds.common.merkle.synchronization.streams.AsyncInputStream@40435271 finish run() | |
| node4 | 6m 8.097s | 2025-10-30 11:23:47.564 | 1195 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | received tree rooted at com.swirlds.virtualmap.VirtualMap with route [] | |
| node4 | 6m 8.098s | 2025-10-30 11:23:47.565 | 1196 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | synchronization complete | |
| node4 | 6m 8.098s | 2025-10-30 11:23:47.565 | 1197 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | learner calls initialize() | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1198 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | initializing tree | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1199 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | initialization complete | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1200 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | learner calls hash() | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1201 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | hashing tree | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1202 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | hashing complete | |
| node4 | 6m 8.099s | 2025-10-30 11:23:47.566 | 1203 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | learner calls logStatistics() | |
| node4 | 6m 8.102s | 2025-10-30 11:23:47.569 | 1204 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | Finished synchronization {"timeInSeconds":0.242,"hashTimeInSeconds":0.0,"initializationTimeInSeconds":0.0,"totalNodes":9,"leafNodes":5,"redundantLeafNodes":2,"internalNodes":4,"redundantInternalNodes":0} [com.swirlds.logging.legacy.payload.SynchronizationCompletePayload] | |
| node4 | 6m 8.103s | 2025-10-30 11:23:47.570 | 1205 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | ReconnectMapMetrics: transfersFromTeacher=9; transfersFromLearner=8; internalHashes=3; internalCleanHashes=0; internalData=0; internalCleanData=0; leafHashes=5; leafCleanHashes=2; leafData=5; leafCleanData=2 | |
| node4 | 6m 8.103s | 2025-10-30 11:23:47.570 | 1206 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | LearningSynchronizer: | learner is done synchronizing | |
| node4 | 6m 8.104s | 2025-10-30 11:23:47.571 | 1207 | INFO | STARTUP | <<platform-core: SyncProtocolWith1 4 to 1>> | ConsistencyTestingToolState: | New State Constructed. | |
| node4 | 6m 8.109s | 2025-10-30 11:23:47.576 | 1208 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStateLearner: | Reconnect data usage report {"dataMegabytes":0.0058650970458984375} [com.swirlds.logging.legacy.payload.ReconnectDataUsagePayload] | |
| node1 | 6m 8.141s | 2025-10-30 11:23:47.608 | 8920 | INFO | RECONNECT | <<work group teaching-synchronizer: async-input-stream #0>> | AsyncInputStream: | com.swirlds.common.merkle.synchronization.streams.AsyncInputStream@6dca36ad finish run() | |
| node1 | 6m 8.144s | 2025-10-30 11:23:47.611 | 8921 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | TeachingSynchronizer: | finished sending tree | |
| node1 | 6m 8.147s | 2025-10-30 11:23:47.614 | 8924 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | Finished synchronization in the role of the sender. | |
| node1 | 6m 8.181s | 2025-10-30 11:23:47.648 | 8925 | INFO | RECONNECT | <<platform-core: SyncProtocolWith4 1 to 4>> | ReconnectStateTeacher: | Finished reconnect in the role of the sender. {"receiving":false,"nodeId":1,"otherNodeId":4,"round":772} [com.swirlds.logging.legacy.payload.ReconnectFinishPayload] | |
| node4 | 6m 8.222s | 2025-10-30 11:23:47.689 | 1209 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStatePeerProtocol: | Finished reconnect in the role of the receiver. {"receiving":true,"nodeId":4,"otherNodeId":1,"round":772} [com.swirlds.logging.legacy.payload.ReconnectFinishPayload] | |
| node4 | 6m 8.223s | 2025-10-30 11:23:47.690 | 1210 | INFO | RECONNECT | <<platform-core: SyncProtocolWith1 4 to 1>> | ReconnectStatePeerProtocol: | Information for state received during reconnect: | |
| Round: 772 Timestamp: 2025-10-30T11:23:45.566754510Z Next consensus number: 23582 Legacy running event hash: dca8d321dd24407a06f2b73cf972b0d9a1c7184866803bd585fa0609535b944a4909a69193af5bb6dcdf9087f6116955 Legacy running event mnemonic: help-orbit-fortune-aunt Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -1313990699 Root hash: 57bec768f947693d849bf24b21d3e36b71004503c89cbe4d84a6b29dd4a2ca98eaa99fb89c9411db9b1bed92fb14f945 (root) VirtualMap state / water-basic-strong-chicken | |||||||||
| node4 | 6m 8.224s | 2025-10-30 11:23:47.691 | 1211 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | A state was obtained from a peer | |
| node4 | 6m 8.225s | 2025-10-30 11:23:47.692 | 1212 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | The state obtained from a peer was validated | |
| node4 | 6m 8.226s | 2025-10-30 11:23:47.693 | 1214 | DEBUG | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | `loadState` : reloading state | |
| node4 | 6m 8.226s | 2025-10-30 11:23:47.693 | 1215 | INFO | STARTUP | <<platform-core: reconnectController>> | ConsistencyTestingToolState: | State initialized with state long -7405525125712608602. | |
| node4 | 6m 8.227s | 2025-10-30 11:23:47.694 | 1216 | INFO | STARTUP | <<platform-core: reconnectController>> | ConsistencyTestingToolState: | State initialized with 772 rounds handled. | |
| node4 | 6m 8.227s | 2025-10-30 11:23:47.694 | 1217 | INFO | STARTUP | <<platform-core: reconnectController>> | TransactionHandlingHistory: | Consistency testing tool log path: data/saved/consistency-test/4/ConsistencyTestLog.csv | |
| node4 | 6m 8.227s | 2025-10-30 11:23:47.694 | 1218 | INFO | STARTUP | <<platform-core: reconnectController>> | TransactionHandlingHistory: | Log file found. Parsing previous history | |
| node4 | 6m 8.240s | 2025-10-30 11:23:47.707 | 1223 | INFO | STATE_TO_DISK | <<platform-core: reconnectController>> | DefaultSavedStateController: | Signed state from round 772 created, will eventually be written to disk, for reason: RECONNECT | |
| node4 | 6m 8.240s | 2025-10-30 11:23:47.707 | 1224 | INFO | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Initializing statistics output in CSV format [ csvOutputFolder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats', csvFileName = 'PlatformTesting4.csv' ] | |
| node4 | 6m 8.241s | 2025-10-30 11:23:47.708 | 1225 | INFO | PLATFORM_STATUS | <platformForkJoinThread-6> | StatusStateMachine: | Platform spent 927.0 ms in BEHIND. Now in RECONNECT_COMPLETE | |
| node4 | 6m 8.242s | 2025-10-30 11:23:47.709 | 1226 | INFO | STARTUP | <platformForkJoinThread-7> | Shadowgraph: | Shadowgraph starting from expiration threshold 745 | |
| node4 | 6m 8.244s | 2025-10-30 11:23:47.711 | 1229 | DEBUG | STARTUP | <<platform-core: MetricsThread #0>> | LegacyCsvWriter: | CsvWriter: Using the existing metrics folder [ folder = '/opt/hgcapp/services-hedera/HapiApp2.0/data/stats' ] | |
| node4 | 6m 8.245s | 2025-10-30 11:23:47.712 | 1230 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 772 state to disk. Reason: RECONNECT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/772 | |
| node4 | 6m 8.246s | 2025-10-30 11:23:47.713 | 1231 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/3 for round 772 | |
| node4 | 6m 8.261s | 2025-10-30 11:23:47.728 | 1243 | INFO | EVENT_STREAM | <<platform-core: reconnectController>> | DefaultConsensusEventStream: | EventStreamManager::updateRunningHash: dca8d321dd24407a06f2b73cf972b0d9a1c7184866803bd585fa0609535b944a4909a69193af5bb6dcdf9087f6116955 | |
| node4 | 6m 8.262s | 2025-10-30 11:23:47.729 | 1244 | INFO | STARTUP | <platformForkJoinThread-3> | PcesFileManager: | Due to recent operations on this node, the local preconsensus event stream will have a discontinuity. The last file with the old origin round is 2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr378_orgn0.pces. All future files will have an origin round of 772. | |
| node4 | 6m 8.263s | 2025-10-30 11:23:47.730 | 1245 | INFO | RECONNECT | <<platform-core: reconnectController>> | ReconnectController: | Reconnect almost done resuming gossip | |
| node4 | 6m 8.408s | 2025-10-30 11:23:47.875 | 1266 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/3 for round 772 | |
| node4 | 6m 8.411s | 2025-10-30 11:23:47.878 | 1267 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 772 Timestamp: 2025-10-30T11:23:45.566754510Z Next consensus number: 23582 Legacy running event hash: dca8d321dd24407a06f2b73cf972b0d9a1c7184866803bd585fa0609535b944a4909a69193af5bb6dcdf9087f6116955 Legacy running event mnemonic: help-orbit-fortune-aunt Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: -1313990699 Root hash: 57bec768f947693d849bf24b21d3e36b71004503c89cbe4d84a6b29dd4a2ca98eaa99fb89c9411db9b1bed92fb14f945 (root) VirtualMap state / water-basic-strong-chicken | |||||||||
| node4 | 6m 8.452s | 2025-10-30 11:23:47.919 | 1271 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus file on disk. | |
| File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr378_orgn0.pces | |||||||||
| node4 | 6m 8.452s | 2025-10-30 11:23:47.919 | 1272 | WARN | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | No preconsensus event files meeting specified criteria found to copy. Lower bound: 745 | |
| node4 | 6m 8.459s | 2025-10-30 11:23:47.926 | 1273 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 772 to disk. Reason: RECONNECT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/772 {"round":772,"freezeState":false,"reason":"RECONNECT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/772/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 6m 8.463s | 2025-10-30 11:23:47.930 | 1274 | INFO | PLATFORM_STATUS | <platformForkJoinThread-3> | StatusStateMachine: | Platform spent 219.0 ms in RECONNECT_COMPLETE. Now in CHECKING | |
| node4 | 6m 9.409s | 2025-10-30 11:23:48.876 | 1275 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:1 H:949924861e7f BR:770), num remaining: 3 | |
| node4 | 6m 9.410s | 2025-10-30 11:23:48.877 | 1276 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:3 H:2d126fdadcef BR:770), num remaining: 2 | |
| node4 | 6m 9.410s | 2025-10-30 11:23:48.877 | 1277 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:0 H:868f4f1e690c BR:770), num remaining: 1 | |
| node4 | 6m 9.410s | 2025-10-30 11:23:48.877 | 1278 | INFO | STARTUP | <<scheduler ConsensusEngine>> | ConsensusImpl: | Found init judge (CR:2 H:2adf6e937534 BR:770), num remaining: 0 | |
| node4 | 6m 13.185s | 2025-10-30 11:23:52.652 | 1426 | INFO | PLATFORM_STATUS | <platformForkJoinThread-2> | StatusStateMachine: | Platform spent 4.7 s in CHECKING. Now in ACTIVE | |
| node0 | 6m 22.309s | 2025-10-30 11:24:01.776 | 9160 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 804 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 6m 22.399s | 2025-10-30 11:24:01.866 | 9349 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 804 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 6m 22.427s | 2025-10-30 11:24:01.894 | 9214 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 804 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node1 | 6m 22.447s | 2025-10-30 11:24:01.914 | 9262 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 804 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node4 | 6m 22.496s | 2025-10-30 11:24:01.963 | 1601 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 804 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 6m 22.568s | 2025-10-30 11:24:02.035 | 9220 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 804 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/804 | |
| node3 | 6m 22.569s | 2025-10-30 11:24:02.036 | 9221 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node4 | 6m 22.587s | 2025-10-30 11:24:02.054 | 1607 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 804 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/804 | |
| node4 | 6m 22.588s | 2025-10-30 11:24:02.055 | 1608 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/10 for round 804 | |
| node1 | 6m 22.614s | 2025-10-30 11:24:02.081 | 9268 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 804 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/804 | |
| node1 | 6m 22.615s | 2025-10-30 11:24:02.082 | 9269 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/52 for round 804 | |
| node2 | 6m 22.638s | 2025-10-30 11:24:02.105 | 9355 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 804 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/804 | |
| node2 | 6m 22.639s | 2025-10-30 11:24:02.106 | 9356 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node3 | 6m 22.655s | 2025-10-30 11:24:02.122 | 9256 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node0 | 6m 22.656s | 2025-10-30 11:24:02.123 | 9166 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 804 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/804 | |
| node0 | 6m 22.657s | 2025-10-30 11:24:02.124 | 9167 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node3 | 6m 22.658s | 2025-10-30 11:24:02.125 | 9257 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 804 Timestamp: 2025-10-30T11:24:00.192564Z Next consensus number: 24607 Legacy running event hash: c6a908224e808ed22b6a615adb4c237075d199fde36ca8486c76e81c48da7245fe35edbe8860959e78496b99a39d21fe Legacy running event mnemonic: cliff-affair-source-chair Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1856532946 Root hash: 03f733fe6c848f31cf3a1440404cde27c123200e585b67d5c569691d4c6f18617027e5b8c615fd9415e0dc80261aa0ba (root) VirtualMap state / leave-lemon-mountain-boring | |||||||||
| node3 | 6m 22.665s | 2025-10-30 11:24:02.132 | 9258 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces Last file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 6m 22.665s | 2025-10-30 11:24:02.132 | 9259 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 777 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node3 | 6m 22.667s | 2025-10-30 11:24:02.134 | 9260 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 6m 22.673s | 2025-10-30 11:24:02.140 | 9261 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 6m 22.674s | 2025-10-30 11:24:02.141 | 9262 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 804 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/804 {"round":804,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/804/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 6m 22.675s | 2025-10-30 11:24:02.142 | 9263 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/139 | |
| node1 | 6m 22.691s | 2025-10-30 11:24:02.158 | 9304 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/52 for round 804 | |
| node1 | 6m 22.693s | 2025-10-30 11:24:02.160 | 9305 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 804 Timestamp: 2025-10-30T11:24:00.192564Z Next consensus number: 24607 Legacy running event hash: c6a908224e808ed22b6a615adb4c237075d199fde36ca8486c76e81c48da7245fe35edbe8860959e78496b99a39d21fe Legacy running event mnemonic: cliff-affair-source-chair Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1856532946 Root hash: 03f733fe6c848f31cf3a1440404cde27c123200e585b67d5c569691d4c6f18617027e5b8c615fd9415e0dc80261aa0ba (root) VirtualMap state / leave-lemon-mountain-boring | |||||||||
| node1 | 6m 22.700s | 2025-10-30 11:24:02.167 | 9322 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 6m 22.700s | 2025-10-30 11:24:02.167 | 9323 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 777 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 6m 22.700s | 2025-10-30 11:24:02.167 | 9324 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 6m 22.706s | 2025-10-30 11:24:02.173 | 9325 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 6m 22.706s | 2025-10-30 11:24:02.173 | 9326 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 804 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/804 {"round":804,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/804/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 6m 22.708s | 2025-10-30 11:24:02.175 | 9327 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/139 | |
| node2 | 6m 22.720s | 2025-10-30 11:24:02.187 | 9399 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node2 | 6m 22.722s | 2025-10-30 11:24:02.189 | 9400 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 804 Timestamp: 2025-10-30T11:24:00.192564Z Next consensus number: 24607 Legacy running event hash: c6a908224e808ed22b6a615adb4c237075d199fde36ca8486c76e81c48da7245fe35edbe8860959e78496b99a39d21fe Legacy running event mnemonic: cliff-affair-source-chair Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1856532946 Root hash: 03f733fe6c848f31cf3a1440404cde27c123200e585b67d5c569691d4c6f18617027e5b8c615fd9415e0dc80261aa0ba (root) VirtualMap state / leave-lemon-mountain-boring | |||||||||
| node4 | 6m 22.726s | 2025-10-30 11:24:02.193 | 1665 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/10 for round 804 | |
| node4 | 6m 22.728s | 2025-10-30 11:24:02.195 | 1666 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 804 Timestamp: 2025-10-30T11:24:00.192564Z Next consensus number: 24607 Legacy running event hash: c6a908224e808ed22b6a615adb4c237075d199fde36ca8486c76e81c48da7245fe35edbe8860959e78496b99a39d21fe Legacy running event mnemonic: cliff-affair-source-chair Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1856532946 Root hash: 03f733fe6c848f31cf3a1440404cde27c123200e585b67d5c569691d4c6f18617027e5b8c615fd9415e0dc80261aa0ba (root) VirtualMap state / leave-lemon-mountain-boring | |||||||||
| node2 | 6m 22.729s | 2025-10-30 11:24:02.196 | 9401 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 6m 22.729s | 2025-10-30 11:24:02.196 | 9402 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 777 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 6m 22.731s | 2025-10-30 11:24:02.198 | 9403 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 6m 22.736s | 2025-10-30 11:24:02.203 | 9202 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/50 for round 804 | |
| node0 | 6m 22.737s | 2025-10-30 11:24:02.204 | 9203 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 804 Timestamp: 2025-10-30T11:24:00.192564Z Next consensus number: 24607 Legacy running event hash: c6a908224e808ed22b6a615adb4c237075d199fde36ca8486c76e81c48da7245fe35edbe8860959e78496b99a39d21fe Legacy running event mnemonic: cliff-affair-source-chair Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 1856532946 Root hash: 03f733fe6c848f31cf3a1440404cde27c123200e585b67d5c569691d4c6f18617027e5b8c615fd9415e0dc80261aa0ba (root) VirtualMap state / leave-lemon-mountain-boring | |||||||||
| node2 | 6m 22.737s | 2025-10-30 11:24:02.204 | 9404 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 6m 22.737s | 2025-10-30 11:24:02.204 | 9405 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 804 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/804 {"round":804,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/804/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 6m 22.738s | 2025-10-30 11:24:02.205 | 1667 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr378_orgn0.pces Last file: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+23+48.309038832Z_seq1_minr745_maxr1245_orgn772.pces | |||||||||
| node4 | 6m 22.738s | 2025-10-30 11:24:02.205 | 1668 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 777 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+23+48.309038832Z_seq1_minr745_maxr1245_orgn772.pces | |||||||||
| node4 | 6m 22.738s | 2025-10-30 11:24:02.205 | 1669 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 6m 22.739s | 2025-10-30 11:24:02.206 | 9406 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/139 | |
| node4 | 6m 22.741s | 2025-10-30 11:24:02.208 | 1670 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 6m 22.742s | 2025-10-30 11:24:02.209 | 1671 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 804 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/804 {"round":804,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/804/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 6m 22.744s | 2025-10-30 11:24:02.211 | 1672 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/1 | |
| node0 | 6m 22.746s | 2025-10-30 11:24:02.213 | 9204 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 6m 22.746s | 2025-10-30 11:24:02.213 | 9205 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 777 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 6m 22.749s | 2025-10-30 11:24:02.216 | 9206 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node0 | 6m 22.755s | 2025-10-30 11:24:02.222 | 9207 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 6m 22.755s | 2025-10-30 11:24:02.222 | 9208 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 804 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/804 {"round":804,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/804/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 6m 22.757s | 2025-10-30 11:24:02.224 | 9209 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/139 | |
| node1 | 7m 21.715s | 2025-10-30 11:25:01.182 | 10766 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 933 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node0 | 7m 21.728s | 2025-10-30 11:25:01.195 | 10648 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 933 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node4 | 7m 21.783s | 2025-10-30 11:25:01.250 | 3085 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 933 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node2 | 7m 21.800s | 2025-10-30 11:25:01.267 | 10813 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 933 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 7m 21.858s | 2025-10-30 11:25:01.325 | 10680 | INFO | STATE_TO_DISK | <<scheduler TransactionHandler>> | DefaultSavedStateController: | Signed state from round 933 created, will eventually be written to disk, for reason: PERIODIC_SNAPSHOT | |
| node3 | 7m 21.898s | 2025-10-30 11:25:01.365 | 10683 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 933 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/933 | |
| node3 | 7m 21.899s | 2025-10-30 11:25:01.366 | 10684 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node2 | 7m 21.946s | 2025-10-30 11:25:01.413 | 10816 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 933 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/933 | |
| node2 | 7m 21.946s | 2025-10-30 11:25:01.413 | 10817 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node3 | 7m 21.979s | 2025-10-30 11:25:01.446 | 10723 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node3 | 7m 21.981s | 2025-10-30 11:25:01.448 | 10724 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 933 Timestamp: 2025-10-30T11:25:00.273784Z Next consensus number: 29398 Legacy running event hash: bc75385d4e7d87d140142b75b1fe7dff47a47c9109198a0833449aa72186b1130a39dd9ea9409cc9e33fd48b60e67f27 Legacy running event mnemonic: imitate-arm-soul-album Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 558976304 Root hash: 1085d72f747ba0ce2be310bd1c43136c0e67745181ab85f6abd142ac31416cee5c2c158eac6da3bcc09a892fc7c1cadb (root) VirtualMap state / awful-boss-month-town | |||||||||
| node3 | 7m 21.987s | 2025-10-30 11:25:01.454 | 10725 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces Last file: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+17+55.702916994Z_seq0_minr1_maxr501_orgn0.pces | |||||||||
| node3 | 7m 21.987s | 2025-10-30 11:25:01.454 | 10726 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 906 File: data/saved/preconsensus-events/3/2025/10/30/2025-10-30T11+21+48.650026277Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node3 | 7m 21.988s | 2025-10-30 11:25:01.455 | 10727 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node3 | 7m 21.996s | 2025-10-30 11:25:01.463 | 10728 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node3 | 7m 21.997s | 2025-10-30 11:25:01.464 | 10729 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 933 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/933 {"round":933,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/933/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node3 | 7m 21.998s | 2025-10-30 11:25:01.465 | 10730 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/3/123/262 | |
| node1 | 7m 22.016s | 2025-10-30 11:25:01.483 | 10769 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 933 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/933 | |
| node1 | 7m 22.017s | 2025-10-30 11:25:01.484 | 10770 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/59 for round 933 | |
| node2 | 7m 22.022s | 2025-10-30 11:25:01.489 | 10848 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node2 | 7m 22.024s | 2025-10-30 11:25:01.491 | 10849 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 933 Timestamp: 2025-10-30T11:25:00.273784Z Next consensus number: 29398 Legacy running event hash: bc75385d4e7d87d140142b75b1fe7dff47a47c9109198a0833449aa72186b1130a39dd9ea9409cc9e33fd48b60e67f27 Legacy running event mnemonic: imitate-arm-soul-album Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 558976304 Root hash: 1085d72f747ba0ce2be310bd1c43136c0e67745181ab85f6abd142ac31416cee5c2c158eac6da3bcc09a892fc7c1cadb (root) VirtualMap state / awful-boss-month-town | |||||||||
| node2 | 7m 22.031s | 2025-10-30 11:25:01.498 | 10850 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+17+55.762411602Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 7m 22.031s | 2025-10-30 11:25:01.498 | 10851 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 906 File: data/saved/preconsensus-events/2/2025/10/30/2025-10-30T11+21+48.564521852Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node2 | 7m 22.031s | 2025-10-30 11:25:01.498 | 10852 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node2 | 7m 22.040s | 2025-10-30 11:25:01.507 | 10853 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node2 | 7m 22.040s | 2025-10-30 11:25:01.507 | 10854 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 933 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/933 {"round":933,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/933/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node2 | 7m 22.042s | 2025-10-30 11:25:01.509 | 10855 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/2/123/262 | |
| node4 | 7m 22.060s | 2025-10-30 11:25:01.527 | 3088 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 933 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/933 | |
| node4 | 7m 22.061s | 2025-10-30 11:25:01.528 | 3089 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/17 for round 933 | |
| node1 | 7m 22.101s | 2025-10-30 11:25:01.568 | 10809 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/59 for round 933 | |
| node1 | 7m 22.103s | 2025-10-30 11:25:01.570 | 10810 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 933 Timestamp: 2025-10-30T11:25:00.273784Z Next consensus number: 29398 Legacy running event hash: bc75385d4e7d87d140142b75b1fe7dff47a47c9109198a0833449aa72186b1130a39dd9ea9409cc9e33fd48b60e67f27 Legacy running event mnemonic: imitate-arm-soul-album Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 558976304 Root hash: 1085d72f747ba0ce2be310bd1c43136c0e67745181ab85f6abd142ac31416cee5c2c158eac6da3bcc09a892fc7c1cadb (root) VirtualMap state / awful-boss-month-town | |||||||||
| node0 | 7m 22.108s | 2025-10-30 11:25:01.575 | 10651 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Started writing round 933 state to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/933 | |
| node0 | 7m 22.109s | 2025-10-30 11:25:01.576 | 10652 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Creating a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node1 | 7m 22.111s | 2025-10-30 11:25:01.578 | 10811 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+17+55.765589321Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 7m 22.112s | 2025-10-30 11:25:01.579 | 10812 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 906 File: data/saved/preconsensus-events/1/2025/10/30/2025-10-30T11+21+48.653744187Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node1 | 7m 22.112s | 2025-10-30 11:25:01.579 | 10813 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node1 | 7m 22.121s | 2025-10-30 11:25:01.588 | 10814 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node1 | 7m 22.122s | 2025-10-30 11:25:01.589 | 10815 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 933 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/933 {"round":933,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/933/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node1 | 7m 22.123s | 2025-10-30 11:25:01.590 | 10816 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/1/123/262 | |
| node0 | 7m 22.188s | 2025-10-30 11:25:01.655 | 10683 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/57 for round 933 | |
| node0 | 7m 22.190s | 2025-10-30 11:25:01.657 | 10684 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 933 Timestamp: 2025-10-30T11:25:00.273784Z Next consensus number: 29398 Legacy running event hash: bc75385d4e7d87d140142b75b1fe7dff47a47c9109198a0833449aa72186b1130a39dd9ea9409cc9e33fd48b60e67f27 Legacy running event mnemonic: imitate-arm-soul-album Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 558976304 Root hash: 1085d72f747ba0ce2be310bd1c43136c0e67745181ab85f6abd142ac31416cee5c2c158eac6da3bcc09a892fc7c1cadb (root) VirtualMap state / awful-boss-month-town | |||||||||
| node0 | 7m 22.197s | 2025-10-30 11:25:01.664 | 10693 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+17+55.593649618Z_seq0_minr1_maxr501_orgn0.pces Last file: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 7m 22.197s | 2025-10-30 11:25:01.664 | 10694 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 906 File: data/saved/preconsensus-events/0/2025/10/30/2025-10-30T11+21+48.654178262Z_seq1_minr474_maxr5474_orgn0.pces | |||||||||
| node0 | 7m 22.198s | 2025-10-30 11:25:01.665 | 10695 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 7m 22.206s | 2025-10-30 11:25:01.673 | 3126 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | MerkleTreeSnapshotWriter: | Successfully created a snapshot on demand in /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/swirlds-tmp/17 for round 933 | |
| node0 | 7m 22.207s | 2025-10-30 11:25:01.674 | 10696 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node0 | 7m 22.207s | 2025-10-30 11:25:01.674 | 10697 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 933 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/933 {"round":933,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/933/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node0 | 7m 22.209s | 2025-10-30 11:25:01.676 | 10698 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/0/123/262 | |
| node4 | 7m 22.209s | 2025-10-30 11:25:01.676 | 3127 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Information for state written to disk: | |
| Round: 933 Timestamp: 2025-10-30T11:25:00.273784Z Next consensus number: 29398 Legacy running event hash: bc75385d4e7d87d140142b75b1fe7dff47a47c9109198a0833449aa72186b1130a39dd9ea9409cc9e33fd48b60e67f27 Legacy running event mnemonic: imitate-arm-soul-album Rounds non-ancient: 26 Creation version: SemanticVersion[major=1, minor=0, patch=0, pre=, build=] Minimum judge hash code: 558976304 Root hash: 1085d72f747ba0ce2be310bd1c43136c0e67745181ab85f6abd142ac31416cee5c2c158eac6da3bcc09a892fc7c1cadb (root) VirtualMap state / awful-boss-month-town | |||||||||
| node4 | 7m 22.219s | 2025-10-30 11:25:01.686 | 3128 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 2 preconsensus files on disk. | |
| First file: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+17+55.716747507Z_seq0_minr1_maxr378_orgn0.pces Last file: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+23+48.309038832Z_seq1_minr745_maxr1245_orgn772.pces | |||||||||
| node4 | 7m 22.220s | 2025-10-30 11:25:01.687 | 3129 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Found 1 preconsensus event file meeting specified criteria to copy. | |
| Lower bound: 906 File: data/saved/preconsensus-events/4/2025/10/30/2025-10-30T11+23+48.309038832Z_seq1_minr745_maxr1245_orgn772.pces | |||||||||
| node4 | 7m 22.220s | 2025-10-30 11:25:01.687 | 3130 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Copying 1 preconsensus event file(s) | |
| node4 | 7m 22.225s | 2025-10-30 11:25:01.692 | 3131 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | BestEffortPcesFileCopy: | Finished copying 1 preconsensus event file(s) | |
| node4 | 7m 22.226s | 2025-10-30 11:25:01.693 | 3132 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | SignedStateFileWriter: | Finished writing state for round 933 to disk. Reason: PERIODIC_SNAPSHOT, directory: /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/933 {"round":933,"freezeState":false,"reason":"PERIODIC_SNAPSHOT","directory":"file:///opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/933/"} [com.swirlds.logging.legacy.payload.StateSavedToDiskPayload] | |
| node4 | 7m 22.227s | 2025-10-30 11:25:01.694 | 3133 | INFO | STATE_TO_DISK | <<scheduler StateSnapshotManager>> | FileUtils: | deleting directory /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.swirlds.demo.consistency.ConsistencyTestingToolMain/4/123/11 | |
| node3 | 8.015m | 2025-10-30 11:25:40.348 | 11634 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith0 3 to 0>> | NetworkUtils: | Connection broken: 3 <- 0 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.346962518Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node4 | 8.015m | 2025-10-30 11:25:40.354 | 4043 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith0 4 to 0>> | NetworkUtils: | Connection broken: 4 <- 0 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.349972915Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node3 | 8.016m | 2025-10-30 11:25:40.424 | 11635 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith2 3 to 2>> | NetworkUtils: | Connection broken: 3 <- 2 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.423380259Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node4 | 8.016m | 2025-10-30 11:25:40.426 | 4044 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith2 4 to 2>> | NetworkUtils: | Connection broken: 4 <- 2 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.423577122Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node3 | 8m 1.061s | 2025-10-30 11:25:40.528 | 11636 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith1 3 to 1>> | NetworkUtils: | Connection broken: 3 <- 1 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.524702491Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node4 | 8m 1.062s | 2025-10-30 11:25:40.529 | 4045 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith1 4 to 1>> | NetworkUtils: | Connection broken: 4 <- 1 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:40.525315149Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||
| node4 | 8m 1.763s | 2025-10-30 11:25:41.230 | 4046 | WARN | SOCKET_EXCEPTIONS | <<platform-core: SyncProtocolWith3 4 to 3>> | NetworkUtils: | Connection broken: 4 <- 3 | |
| com.swirlds.common.threading.pool.ParallelExecutionException: Time thrown: 2025-10-30T11:25:41.226158473Z at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:160) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.runProtocol(RpcPeerProtocol.java:293) at com.swirlds.platform.network.communication.states.ProtocolNegotiated.transition(ProtocolNegotiated.java:47) at com.swirlds.platform.network.communication.Negotiator.execute(Negotiator.java:79) at com.swirlds.platform.network.communication.ProtocolNegotiatorThread.run(ProtocolNegotiatorThread.java:70) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.doWork(StoppableThreadImpl.java:599) at com.swirlds.common.threading.framework.internal.StoppableThreadImpl.run(StoppableThreadImpl.java:200) at com.swirlds.common.threading.framework.internal.AbstractThreadConfiguration.lambda$wrapRunnableWithSnapshot$3(AbstractThreadConfiguration.java:654) at java.base/java.lang.Thread.run(Thread.java:1583) Suppressed: java.util.concurrent.ExecutionException: java.net.SocketException: Connection or outbound has closed at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection or outbound has closed at java.base/sun.security.ssl.SSLSocketImpl$AppOutputStream.write(SSLSocketImpl.java:1297) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.write(AbstractStreamExtension.java:115) at com.swirlds.common.io.extendable.ExtendableOutputStream.write(ExtendableOutputStream.java:64) at java.base/java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:125) at java.base/java.io.BufferedOutputStream.implFlush(BufferedOutputStream.java:252) at java.base/java.io.BufferedOutputStream.flush(BufferedOutputStream.java:240) at java.base/java.io.DataOutputStream.flush(DataOutputStream.java:131) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.writeMessages(RpcPeerProtocol.java:384) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$2(RpcPeerProtocol.java:297) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more Caused by: java.util.concurrent.ExecutionException: java.net.SocketException: Connection reset at java.base/java.util.concurrent.FutureTask.report(FutureTask.java:122) at java.base/java.util.concurrent.FutureTask.get(FutureTask.java:191) at com.swirlds.common.threading.pool.CachedPoolParallelExecutor.doParallelWithHandler(CachedPoolParallelExecutor.java:154) ... 8 more Caused by: java.net.SocketException: Connection reset at java.base/sun.nio.ch.NioSocketImpl.implRead(NioSocketImpl.java:318) at java.base/sun.nio.ch.NioSocketImpl.read(NioSocketImpl.java:346) at java.base/sun.nio.ch.NioSocketImpl$1.read(NioSocketImpl.java:796) at java.base/java.net.Socket$SocketInputStream.read(Socket.java:1099) at java.base/sun.security.ssl.SSLSocketInputRecord.read(SSLSocketInputRecord.java:489) at java.base/sun.security.ssl.SSLSocketInputRecord.readHeader(SSLSocketInputRecord.java:483) at java.base/sun.security.ssl.SSLSocketInputRecord.bytesInCompletePacket(SSLSocketInputRecord.java:70) at java.base/sun.security.ssl.SSLSocketImpl.readApplicationRecord(SSLSocketImpl.java:1461) at java.base/sun.security.ssl.SSLSocketImpl$AppInputStream.read(SSLSocketImpl.java:1066) at com.swirlds.common.io.extendable.extensions.AbstractStreamExtension.read(AbstractStreamExtension.java:73) at com.swirlds.common.io.extendable.ExtendableInputStream.read(ExtendableInputStream.java:63) at java.base/java.io.BufferedInputStream.fill(BufferedInputStream.java:291) at java.base/java.io.BufferedInputStream.read1(BufferedInputStream.java:347) at java.base/java.io.BufferedInputStream.implRead(BufferedInputStream.java:420) at java.base/java.io.BufferedInputStream.read(BufferedInputStream.java:399) at java.base/java.io.DataInputStream.readFully(DataInputStream.java:208) at java.base/java.io.DataInputStream.readShort(DataInputStream.java:319) at org.hiero.base.io.streams.AugmentedDataInputStream.readShort(AugmentedDataInputStream.java:158) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.readMessages(RpcPeerProtocol.java:428) at com.swirlds.platform.network.protocol.rpc.RpcPeerProtocol.lambda$runProtocol$1(RpcPeerProtocol.java:296) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:24) at org.hiero.base.concurrent.ThrowingRunnable.call(ThrowingRunnable.java:9) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ... 2 more | |||||||||