XiaomingY/omni-ies-viewer/tools/packman/bootstrap/fetch_file_from_packman_bootstrap.cmd
:: Copyright 2019 NVIDIA CORPORATION
::
:: Licensed under the Apache License, Version 2.0 (the "License");
:: you may not use this file except in compliance with the License.
:: You may obtain a copy of the License at
::
::    http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.

:: You need to specify <package-name> <target-path> as input to this command
@setlocal
@set PACKAGE_NAME=%1
@set TARGET_PATH=%2

@echo Fetching %PACKAGE_NAME% ...

@powershell -ExecutionPolicy ByPass -NoLogo -NoProfile -File "%~dp0download_file_from_url.ps1" ^
    -source "http://bootstrap.packman.nvidia.com/%PACKAGE_NAME%" -output %TARGET_PATH%

:: A bug in powershell prevents the errorlevel code from being set when using the -File execution option
:: We must therefore do our own failure analysis, basically make sure the file exists and is larger than 0 bytes:
@if not exist %TARGET_PATH% goto ERROR_DOWNLOAD_FAILED
@if %~z2==0 goto ERROR_DOWNLOAD_FAILED

@endlocal
@exit /b 0

:ERROR_DOWNLOAD_FAILED
@echo Failed to download file from S3
@echo Most likely because endpoint cannot be reached or file %PACKAGE_NAME% doesn't exist
@endlocal
@exit /b 1
XiaomingY/omni-ies-viewer/tools/packman/bootstrap/download_file_from_url.ps1
<#
Copyright 2019 NVIDIA CORPORATION

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#>

param(
    [Parameter(Mandatory=$true)][string]$source=$null,
    [string]$output="out.exe"
)
$filename = $output

$triesLeft = 3

do
{
    $triesLeft -= 1

    try
    {
        Write-Host "Downloading from bootstrap.packman.nvidia.com ..."
        $wc = New-Object net.webclient
        $wc.Downloadfile($source, $fileName)
        $triesLeft = 0
    }
    catch
    {
        Write-Host "Error downloading $source!"
        Write-Host $_.Exception|format-list -force
    }
} while ($triesLeft -gt 0)
XiaomingY/omni-ies-viewer/tools/packman/bootstrap/generate_temp_folder.ps1
<#
Copyright 2019 NVIDIA CORPORATION

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#>

param(
    [Parameter(Mandatory=$true)][string]$parentPath=$null
)
[string] $name = [System.Guid]::NewGuid()
$out = Join-Path $parentPath $name
New-Item -ItemType Directory -Path ($out) | Out-Null
Write-Host $out

# SIG # Begin signature block
# (Authenticode signature data omitted)
# SIG # End signature block
XiaomingY/omni-ies-viewer/tools/packman/bootstrap/install_package.py
# Copyright 2019 NVIDIA CORPORATION
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#    http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import zipfile
import tempfile
import sys
import shutil

__author__ = "hfannar"
logging.basicConfig(level=logging.WARNING, format="%(message)s")
logger = logging.getLogger("install_package")


class TemporaryDirectory:
    def __init__(self):
        self.path = None

    def __enter__(self):
        self.path = tempfile.mkdtemp()
        return self.path

    def __exit__(self, type, value, traceback):
        # Remove temporary data created
        shutil.rmtree(self.path)


def install_package(package_src_path, package_dst_path):
    with zipfile.ZipFile(
        package_src_path, allowZip64=True
    ) as zip_file, TemporaryDirectory() as temp_dir:
        zip_file.extractall(temp_dir)
        # Recursively copy (temp_dir will be automatically cleaned up on exit)
        try:
            # Recursive copy is needed because both package name and version folder could be missing in
            # target directory:
            shutil.copytree(temp_dir, package_dst_path)
        except OSError as exc:
            logger.warning(
                "Directory %s already present, packaged installation aborted" % package_dst_path
            )
        else:
            logger.info("Package successfully installed to %s" % package_dst_path)


install_package(sys.argv[1], sys.argv[2])
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/extension.py
import omni.ext
import omni.ui as ui
from omni.kit.viewport.utility import get_active_viewport_window

from .viewport_scene import ViewportSceneInfo


# Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be
# instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled
# on_shutdown() is called.
class AimingToolExtension(omni.ext.IExt):
    # ext_id is current extension id. It can be used with extension manager to query additional information, like where
    # this extension is located on filesystem.
    def __init__(self) -> None:
        super().__init__()
        self.viewport_scene = None

    def on_startup(self, ext_id):
        viewport_window = get_active_viewport_window()
        self.viewport_scene = ViewportSceneInfo(viewport_window, ext_id)

    def on_shutdown(self):
        if self.viewport_scene:
            self.viewport_scene.destroy()
            self.viewport_scene = None
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/viewport_scene.py
from omni.ui import scene as sc
import omni.ui as ui

from .object_info_manipulator import ObjInfoManipulator
from .object_info_model import ObjInfoModel


class ViewportSceneInfo():
    """The Object Info Manipulator, placed into a Viewport"""

    def __init__(self, viewport_window, ext_id) -> None:
        self.scene_view = None
        self.viewport_window = viewport_window

        # NEW: Create a unique frame for our SceneView
        with self.viewport_window.get_frame(ext_id):
            # Create a default SceneView (it has a default camera-model)
            self.scene_view = sc.SceneView()

            # Add the manipulator into the SceneView's scene
            with self.scene_view.scene:
                ObjInfoManipulator(model=ObjInfoModel())

            # Register the SceneView with the Viewport to get projection and view updates
            self.viewport_window.viewport_api.add_scene_view(self.scene_view)

    def __del__(self):
        self.destroy()

    def destroy(self):
        if self.scene_view:
            # Empty the SceneView of any elements it may have
            self.scene_view.scene.clear()
            # Un-register the SceneView from Viewport updates
            if self.viewport_window:
                self.viewport_window.viewport_api.remove_scene_view(self.scene_view)
        # Remove our references to these objects
        self.viewport_window = None
        self.scene_view = None
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/__init__.py
from .extension import *
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/object_info_model.py
from pxr import Tf
from pxr import Gf
from pxr import Usd
from pxr import UsdGeom
from pxr import UsdShade
from pxr import UsdLux
from .IESReader import IESLight
import os.path
import numpy as np

from omni.ui import scene as sc
import omni.usd


def _flatten_matrix(matrix: Gf.Matrix4d):
    m0, m1, m2, m3 = matrix[0], matrix[1], matrix[2], matrix[3]
    return [
        m0[0], m0[1], m0[2], m0[3],
        m1[0], m1[1], m1[2], m1[3],
        m2[0], m2[1], m2[2], m2[3],
        m3[0], m3[1], m3[2], m3[3],
    ]


class ObjInfoModel(sc.AbstractManipulatorModel):
    """
    The model tracks the position and info of the selected object.
    """

    class MatrixItem(sc.AbstractManipulatorItem):
        """
        The Model Item represents the transformation. It doesn't contain anything
        because we take the transformation directly from USD when requesting.
        """

        identity = [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1]

        def __init__(self):
            super().__init__()
            self.value = self.identity.copy()

    class PositionItem(sc.AbstractManipulatorItem):
        """
        The Model Item represents the position. It doesn't contain anything
        because we take the position directly from USD when requesting.
        """

        def __init__(self) -> None:
            super().__init__()
            self.value = [0, 0, 0]

    class PositionList(sc.AbstractManipulatorItem):
        """
        The Model Item represents a list of positions. It doesn't contain anything
        because we take the positions directly from USD when requesting.
        """

        def __init__(self) -> None:
            super().__init__()
            self.value = [[0, 0, 0]]

    def __init__(self) -> None:
        super().__init__()

        # Current selected prim list
        self.prim = []
        self.current_path = []
        self.material_name = []
        self.stage_listener = None
        self.horizontal_step = 15
        self.vertical_step = 15
        self.IESPoints = [ObjInfoModel.PositionList()]
        self.transformation = [ObjInfoModel.MatrixItem()]

        # Save the UsdContext name (we currently only work with a single Context)
        self.usd_context = self._get_context()

        # Track selection changes
        self.events = self.usd_context.get_stage_event_stream()
        self.stage_event_delegate = self.events.create_subscription_to_pop(
            self.on_stage_event, name="Object Info Selection Update"
        )

    @property
    def _time(self):
        return Usd.TimeCode.Default()

    def _get_context(self) -> Usd.Stage:
        # Get the UsdContext we are attached to
        return omni.usd.get_context()

    # Update when lights are transformed or modified
    def notice_changed(self, notice: Usd.Notice, stage: Usd.Stage) -> None:
        """Called by Tf.Notice. Used when the current selected object changes in some way."""
        light_path = self.current_path
        if not light_path:
            return

        for p in notice.GetChangedInfoOnlyPaths():
            prim_path = p.GetPrimPath().pathString
            # Check if prim_path is not in the selected list but a parent of prim_path is in the selected list
            if prim_path not in light_path:
                if (True in (light_path_item.startswith(prim_path) for light_path_item in light_path)):
                    if UsdGeom.Xformable.IsTransformationAffectedByAttrNamed(p.name):
                        self._item_changed(self.transformation[0])
                continue

            if UsdGeom.Xformable.IsTransformationAffectedByAttrNamed(p.name):
                self._item_changed(self.transformation[0])
            # If a light property changed, such as the IES file, update the profile
            self._item_changed(self.transformation[0])

    def _get_transform(self, time: Usd.TimeCode):
        """Returns world transform of currently selected object"""
        if not self.prim:
            return [ObjInfoModel.MatrixItem.identity.copy()]

        # Compute matrix from world-transform in USD
        # Get transform matrix for each selected light
        world_xform_list = [UsdGeom.BasisCurves(prim).ComputeLocalToWorldTransform(time) for prim in self.prim]

        # Flatten Gf.Matrix4d to list
        return [_flatten_matrix(world_xform) for world_xform in world_xform_list]

    def get_item(self, identifier):
        if identifier == "IESPoints":
            return self.IESPoints
        if identifier == "transformation":
            return self.transformation

    def get_as_floats(self, item):
        if item == self.transformation:
            return self._get_transform(self._time)
        if item == self.IESPoints:
            return self.get_points(self._time)
        return []

    # Get IES points for each selected light
    def get_points(self, time: Usd.TimeCode):
        if not self.prim:
            return [[0, 0, 0]]
        allIESPoint = []
        for prim in self.prim:
            iesFile = prim.GetAttribute('shaping:ies:file').Get()
            allIESPoint.append(IESLight(str(iesFile).replace('@', '')).points)
        return allIESPoint

    def on_stage_event(self, event):
        """Called by stage_event_stream. We only care about selection changes."""
        if event.type == int(omni.usd.StageEventType.SELECTION_CHANGED):
            self.current_path = []
            self.prim = []
            primList = []
            primPathList = []

            usd_context = self._get_context()
            stage = usd_context.get_stage()
            if not stage:
                return

            prim_paths = usd_context.get_selection().get_selected_prim_paths()
            if not prim_paths:
                # This turns off the manipulator when everything is deselected
                self._item_changed(self.transformation[0])
                return

            # Select lights with an IES file applied
            lightCount = 0
            for i in prim_paths:
                prim = stage.GetPrimAtPath(i)
                if (UsdLux.Light(prim) and prim.GetAttribute('shaping:ies:file').Get()
                        and not (prim.IsA(UsdLux.DistantLight))):
                    primList.append(prim)
                    primPathList.append(i)
                    lightCount = lightCount + 1

            if (lightCount == 0):
                if self.stage_listener:
                    self.stage_listener.Revoke()
                    self.stage_listener = None
                self._item_changed(self.transformation[0])
                return

            if not self.stage_listener:
                # This handles camera movement
                self.stage_listener = Tf.Notice.Register(Usd.Notice.ObjectsChanged, self.notice_changed, stage)

            self.prim = primList
            self.current_path = primPathList

            # Position is changed because new selected object has a different position
            self._item_changed(self.transformation[0])

    def destroy(self):
        self.events = None
        self.stage_event_delegate.unsubscribe()
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/object_info_manipulator.py
from __future__ import division

from omni.ui import scene as sc
from omni.ui import color as cl
import omni.ui as ui
import numpy as np


class ObjInfoManipulator(sc.Manipulator):
    """Manipulator that displays the object path and material assignment
    with a leader line to the top of the object's bounding box.
    """

    def on_build(self):
        """Called when the model is changed and rebuilds the whole manipulator"""
        if not self.model:
            return

        IESPoints = self.model.get_as_floats(self.model.IESPoints)
        numHorizontal = int((360 / self.model.horizontal_step) + 1)
        primCount = 0
        for transformation in self.model.get_as_floats(self.model.transformation):
            self.__root_xf = sc.Transform(transformation)
            with self.__root_xf:
                self._x_xform = sc.Transform()
                with self._x_xform:
                    self._shape_xform = sc.Transform()
                    IESPoint = IESPoints[primCount]
                    numVertical = int(len(IESPoint) / numHorizontal)
                    for index in range(0, numHorizontal):
                        points = IESPoint[index * numVertical:(index + 1) * numVertical]
                        if (len(points) > 0):
                            sc.Curve(points.tolist(), thicknesses=[1.0], colors=[cl.yellow], tessellation=9)
            primCount = primCount + 1

    def on_model_updated(self, item):
        # Regenerate the manipulator
        self.invalidate()
XiaomingY/omni-ies-viewer/exts/IESViewer/IESViewer/IESReader.py
import numpy as np
import re
import math
#import matplotlib.pyplot as plt
from scipy import interpolate
import os.path
#from mpl_toolkits.mplot3d.axes3d import Axes3D
import omni.ext
import omni.ui as ui
import omni.kit.pipapi  # needed so the install() call below can be resolved

omni.kit.pipapi.install("astropy")
from astropy.coordinates import spherical_to_cartesian

DEFAULT_HORIZONTAL_STEP = 15
DEFAULT_VERTICAL_STEP = 15
IES_MaxLength = 80


class IESLight():
    def __init__(self, iesFile):
        # Current selected prim
        if iesFile and os.path.exists(iesFile):
            self.file = iesFile
        else:
            return
        self.width = 0
        self.length = 0
        self.radius = 0

        all_values = self.readIESfile(self.file)
        verticalAngles, horizontalAngles, intensities, self.width, self.length, self.radius = self.getIESproperties(all_values)
        horizontalAnglesMirrored, intensityMirrored = self.mirrorAngles(horizontalAngles, intensities)
        horizontalResampled = np.arange(0, 361, DEFAULT_HORIZONTAL_STEP)
        verticalResampled = np.arange(0, verticalAngles[-1] + 1, DEFAULT_VERTICAL_STEP)
        resampledIntensity = self.interpolateIESValues(np.array(horizontalAnglesMirrored), np.array(verticalAngles), horizontalResampled, verticalResampled, intensityMirrored)
        self.points = self.IESCoord2XYZ(horizontalResampled, verticalResampled, resampledIntensity, IES_MaxLength)

    # Read an IES file and return vertical angles, horizontal angles, intensities, width, length, radius.
    def readIESfile(self, fileName):
        f = open(fileName, encoding="ISO-8859-1")  # needed to read \r\n correctly; otherwise universal newline handling ignores the carriage return
        startReading = 0
        line = f.readline()
        allValues = ""
        while line:
            if (not(line.strip())):
                break
            else:
                # After this line, there are actual useful values
                if ("TILT=NONE" in line.strip()):
                    line = f.readline()
                    startReading = 1
                # Read all numbers into one string
                if (startReading):
                    allValues = allValues + line
                line = f.readline()
        f.close()
        # One array with all values
        dimentions = re.split('\s+', allValues.strip())
        return dimentions

    def getIESproperties(self, allValues):
        FEET2METER = 0.3048
        verticalAngles = []
        horizontalAngles = []
        width = 0
        length = 0
        radius = 0
        intensityMultiplier = 1
        numberVerticalAngle = 0
        numberHorizontalAngle = 0
        unit = 1  # 1 for feet, 2 for meter

        # Number of vertical angles and horizontal angles measured
        numberVerticalAngle = int(allValues[3])
        numberHorizontalAngle = int(allValues[4])

        # Check if shape is rectangle or disk
        if (float(allValues[7]) < 0):
            radius = allValues[7] * -1
        else:
            width = allValues[7]
            length = allValues[8]

        # Convert dimensions to meters if measured in feet
        if (float(allValues[6]) == 1):
            radius = radius * FEET2METER
            width = width * FEET2METER
            length = length * FEET2METER

        # The actual vertical angles and horizontal angles as lists
        verticalAngles = list(map(float, allValues[13:13 + numberVerticalAngle]))
        horizontalAngles = list(map(float, allValues[13 + numberVerticalAngle:13 + numberVerticalAngle + numberHorizontalAngle]))

        # Read intensities and convert them to a 2D array
        intensities = np.array(allValues[13 + numberVerticalAngle + numberHorizontalAngle:len(allValues)])
        intensities = intensities.reshape(numberHorizontalAngle, numberVerticalAngle).astype(np.float16)

        return verticalAngles, horizontalAngles, intensities, width, length, radius

    # IES files can have several symmetries:
    # (1) only measured at one horizontal angle (0), which needs to be repeated to all horizontal angles from 0 to 360
    # (2) only measured at horizontal angles (0~90), which need to be mirrored twice to horizontal angles from 0 to 360
    # (3) only measured at horizontal angles (0~180), which need to be mirrored to horizontal angles from 0 to 360
    # (4) only measured at horizontal angles (0~360), which can be used directly
    def mirrorAngles(self, horizontalAngles, intensities):
        # Make use of symmetry in the file and produce horizontal angles from 0~360
        if (horizontalAngles[-1] == 0):
            horizontalAnglesMirrored = list(np.arange(0, 361, DEFAULT_HORIZONTAL_STEP))
        else:
            horizontalAnglesMirrored = list(np.arange(0, 361, horizontalAngles[-1] / (len(horizontalAngles) - 1)))

        # Make use of symmetry in the file and copy intensities for horizontal angles from 0~360
        if (horizontalAngles[-1] == 90):
            # Mirror results [90:180]
            a = np.concatenate((intensities, np.flip(intensities, 0)[1:]), axis=0)
            intensityMirrored = np.concatenate((a, np.flip(a, 0)[1:]), axis=0)
        elif (horizontalAngles[-1] == 180):
            intensityMirrored = np.concatenate((intensities, np.flip(intensities, 0)[1:]), axis=0)
        elif (horizontalAngles[-1] == 0):
            intensityMirrored = np.array(([intensities[0], ] * len(np.arange(0, 361, DEFAULT_HORIZONTAL_STEP))))
        else:
            #print("Symmetry 360")
            intensityMirrored = intensities
        return horizontalAnglesMirrored, intensityMirrored

    def IESCoord2XYZ(self, horizontalAngles, verticalAngles, intensity, maxLength):
        maxValue = np.amax(intensity)
        if (maxValue > maxLength):
            intensity = intensity * (maxLength / maxValue)
        for index, horizontalAngle in enumerate(horizontalAngles):
            if (index == 0):
                # Omniverse and 3ds Max make the light upside down; the horizontal angle rotation direction needs to be flipped.
                points = np.array(spherical_to_cartesian(intensity[index].tolist(), [math.radians(90 - x) for x in verticalAngles], [math.radians(-1 * horizontalAngle)] * len(verticalAngles))).transpose()
            else:
                newPoints = np.array(spherical_to_cartesian(intensity[index], [math.radians(90 - x) for x in verticalAngles], [math.radians(-1 * horizontalAngle)] * len(verticalAngles))).transpose()
                points = np.concatenate((points, newPoints), axis=0)
        # Omniverse and 3ds Max make the light upside down, so flip z.
        points[:, 2] *= -1
        return points

    def interpolateIESValues(self, originalHorizontalAngles, originalVerticalAngles, newHorizontalAngles, newVerticalAngles, intensity):
        fun = interpolate.interp2d(originalVerticalAngles, originalHorizontalAngles, intensity, kind='linear')  # kind could be {'linear', 'cubic', 'quintic'}
        interpolatedIntensity = fun(newVerticalAngles, newHorizontalAngles)
        return interpolatedIntensity
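A minimal usage sketch of the `IESLight` reader above. It assumes the extension is enabled inside an Omniverse Python environment (the module imports `omni.ext`, `omni.ui` and `omni.kit.pipapi`), and the `.ies` file path is illustrative only, not part of the repository:

```python
# Usage sketch for IESReader.IESLight (hypothetical file path, Omniverse environment assumed).
from IESViewer.IESReader import IESLight

light = IESLight("C:/profiles/downlight.ies")  # illustrative Type C IES file
# 'points' is an (N, 3) array of XYZ samples of the photometric web,
# rescaled so no sample lies farther than IES_MaxLength from the light origin.
print(light.points.shape)
print(light.points[:5])
```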
XiaomingY/omni-ies-viewer/exts/IESViewer/config/extension.toml
[package]
# Semantic Versioning is used: https://semver.org/
version = "1.0.0"
authors = ["Xiaoming Yang"]

# The title and description fields are primarily for displaying extension info in UI
title = "IES Viewer For Display IES Light Profiles"
description = "This extension displays IES profiles for selected light objects."

# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"

# URL of the extension source repository.
repository = "https://github.com/XiaomingY/omni-ies-viewer"

# One of categories for UI.
category = "Lighting"

# Keywords for the extension
keywords = ["Lighting", "IES"]

changelog = "docs/CHANGELOG.md"
preview_image = "data/preview.png"
icon = "data/icon.png"

# Use omni.ui to build simple UI
[dependencies]
"omni.ui.scene" = { }
"omni.usd" = { }
"omni.kit.viewport.utility" = { }

# Main python module this extension provides, it will be publicly available as "import IESViewer".
[[python.module]]
name = "IESViewer"
XiaomingY/omni-ies-viewer/exts/IESViewer/docs/README.md
# IES Viewer Omniverse Extension
![](../data/preview.png)

This extension displays the IES profile (photometric web) for selected light objects. It is particularly useful for visualizing architectural lighting designs: the orientation of measured light distribution profiles can be quickly tested with visual feedback. IES files are resampled so they stay lightweight to render. This extension is developed based on [omni.example.ui_scene.object_info](https://github.com/NVIDIA-Omniverse/kit-extension-sample-ui-scene/tree/main/exts/omni.example.ui_scene.object_info).

Supported light types: sphere light, rectangular light, disk light and cylinder light. Only Type C IES files are currently supported, which is also the most common type for architectural lighting.

## Adding This Extension

To add this extension to your Omniverse app:
1. Go to Extension Manager and turn on the Viewport Utility extension
2. Turn on the IES Viewer extension
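For orientation, the extension reads the light's IES profile from the standard `shaping:ies:file` attribute (this is the attribute queried in `object_info_model.py`). A small sketch of inspecting it yourself from the Script Editor; the prim path below is illustrative only:

```python
# Sketch: query the IES profile assigned to a light, as the extension does internally.
# Assumes an Omniverse app session; "/World/RectLight" is a hypothetical prim path.
import omni.usd

stage = omni.usd.get_context().get_stage()
light_prim = stage.GetPrimAtPath("/World/RectLight")
ies_attr = light_prim.GetAttribute("shaping:ies:file")
if ies_attr and ies_attr.Get():
    print("IES profile:", ies_attr.Get())
else:
    print("No IES profile assigned to this light")
```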
Ekozmaster/NvidiaOmniverseRTXRemixTools/link_app.sh
#!/bin/bash

set -e

SCRIPT_DIR=$(dirname ${BASH_SOURCE})
cd "$SCRIPT_DIR"

exec "tools/packman/python.sh" tools/scripts/link_app.py $@
Ekozmaster/NvidiaOmniverseRTXRemixTools/link_app.bat
@echo off
call "%~dp0tools\packman\python.bat" %~dp0tools\scripts\link_app.py %*
if %errorlevel% neq 0 ( goto Error )

:Success
exit /b 0

:Error
exit /b %errorlevel%
Ekozmaster/NvidiaOmniverseRTXRemixTools/README.md
# RTX Remix Tools [ekozerski.rtxremixtools]
Focused on improving RTX Remix modding workflows, this extension is designed to speed up iteration when producing assets and mods by providing useful UI operations inside Omniverse apps like USD Composer/Create or Code.
It provides some options in the right-click context menu to set up ideal replacement assets, as well as some conversion operations to ensure assets will be compatible with the Remix runtime.

![Alt text](ContextMenu.png)

It is primarily designed to operate on Remix captured scenes, so users can get instant feedback on what their mods are going to look like in the game scenes and iterate faster.

## Available Tools
### Fix Meshes Geometry
<i>(Operation is performed on every mesh of a USD/USDA source file and can't be undone)</i>

Interpolation Mode
- The RTX Remix runtime only supports meshes with "vertex" interpolation mode, in which the "points", "normals" and "uvs" arrays must have the same length, but DCC tools usually export the mesh using "faceVarying" interpolation mode. This operation reorganizes the geometry to be compatible with the runtime.
  - See: "Interpolation of Geometric Primitive Variables" - https://openusd.org/dev/api/class_usd_geom_primvar.html
- This operation only applies to meshes inside the mods folder, not the captured ones.

UV Maps
- The runtime supports a single UV map per mesh, which should have one of a few known names, so this script finds the many variations, picks one and renames it to the standard "primvars:st", while also setting the appropriate type as "TextureCoordinate" (TexCoord2fArray / TexCoord2f[]). The other UV maps are discarded.

Unused Primvars
- displayColor and displayOpacity are removed from the mesh.

### Setup for Mesh Replacement
Exports the selected mesh to a selected path, already setting up the replacements and references to work in the runtime, so for every change the user only needs to:
- Open the exported mesh in their DCC of choice, make the changes and export again (with the right settings, triangulating faces, no materials, etc.).
- Back in OV, refresh the reference to see the changes in the captured scene.
- Use "Fix Meshes Geometry" again to make it Remix-compatible.
- Enjoy.

The original mesh is kept in case the user only wants to add more models. Make sure to delete it if the intention is to completely replace the original mesh.

### Add Model
If the user already has authored USD models, this option allows selecting multiple models and adding them to the mesh_HASH prim.

### Add Material
This option allows selecting a material .MDL file (AperturePBR_Opacity.mdl or AperturePBR_Translucent.mdl) to add a material prim to the mesh_HASH prim.

### Original Draw Call Preservation
Allows setting the "custom int preserveOriginalDrawCall" attribute to indicate whether the runtime should be forced to render the original mesh or not. It must be set to 1 when placing custom lights, or else the original mesh disappears. A short scripting sketch of setting this attribute is included after the notes below.

PS: Remember to set this to 0 if you want to make a mesh replacement and remove the original mesh.

### Select Source Mesh
Quick way to select the original source mesh_HASH prim in the scene when you have an instance prim selected.

<br>

## Things to Keep in Mind
- In a capture scene, any changes made to the "inst_SOMEHASH_x" prims won't show up in the runtime, so all changes must be done in the "mesh_SOMEHASH" they're referencing. Whenever the user clicks an inst_ prim to perform an action like Fix Meshes Geometry or Add Model (e.g. right-clicking in the 3D viewport), this tool will try to find the referenced mesh_SOMEHASH and perform the operations on it instead.
- With that in mind, always keep an eye on the "Layers" tab to check whether you have made any changes to the "instances" path. Try to delete those changes as much as possible.
- The only material types that work in the runtime are described in AperturePBR_Opacity.MDL and AperturePBR_Translucent.MDL, and every mesh must be triangulated. If you want to add a model you got from somewhere else, like an asset store, make sure to convert the assets to work in the runtime.
- When placing lights in the scene, it is necessary to set an int "preserveOriginalDrawCall" to "1" in order to keep rendering the original mesh. If another layer is setting this flag somewhere and you want to replace/remove the original mesh in your own layer, you will notice that the original mesh can't be removed without setting this flag back to "0". You can set it back to "0" on your own layer, but make sure your layer comes on top of the one that sets it to true.
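As referenced in the Original Draw Call Preservation section, here is a minimal sketch of toggling that attribute through the USD Python API. Only the attribute name and values come from the notes above; the mesh_HASH prim path is illustrative, and an open capture/mod stage in an Omniverse Script Editor session is assumed:

```python
# Sketch: set preserveOriginalDrawCall on a mesh_HASH prim (path is hypothetical).
from pxr import Sdf
import omni.usd

stage = omni.usd.get_context().get_stage()
mesh_prim = stage.GetPrimAtPath("/RootNode/meshes/mesh_0123456789ABCDEF")  # illustrative hash
attr = mesh_prim.CreateAttribute("preserveOriginalDrawCall", Sdf.ValueTypeNames.Int, custom=True)
attr.Set(1)  # 1 = keep rendering the original mesh (needed when placing custom lights); 0 = allow removal
```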
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/scripts/link_app.py
import argparse
import json
import os
import sys

import packmanapi
import urllib3


def find_omniverse_apps():
    http = urllib3.PoolManager()
    try:
        r = http.request("GET", "http://127.0.0.1:33480/components")
    except Exception as e:
        print(f"Failed retrieving apps from an Omniverse Launcher, maybe it is not installed?\nError: {e}")
        sys.exit(1)

    apps = {}
    for x in json.loads(r.data.decode("utf-8")):
        latest = x.get("installedVersions", {}).get("latest", "")
        if latest:
            for s in x.get("settings", []):
                if s.get("version", "") == latest:
                    root = s.get("launch", {}).get("root", "")
                    apps[x["slug"]] = (x["name"], root)
                    break
    return apps


def create_link(src, dst):
    print(f"Creating a link '{src}' -> '{dst}'")
    packmanapi.link(src, dst)


APP_PRIORITIES = ["code", "create", "view"]

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Create folder link to Kit App installed from Omniverse Launcher")
    parser.add_argument(
        "--path",
        help="Path to Kit App installed from Omniverse Launcher, e.g.: 'C:/Users/bob/AppData/Local/ov/pkg/create-2021.3.4'",
        required=False,
    )
    parser.add_argument(
        "--app", help="Name of Kit App installed from Omniverse Launcher, e.g.: 'code', 'create'", required=False
    )
    args = parser.parse_args()

    path = args.path
    if not path:
        print("Path is not specified, looking for Omniverse Apps...")
        apps = find_omniverse_apps()
        if len(apps) == 0:
            print(
                "Can't find any Omniverse Apps. Use Omniverse Launcher to install one. 'Code' is the recommended app for developers."
            )
            sys.exit(0)

        print("\nFound following Omniverse Apps:")
        for i, slug in enumerate(apps):
            name, root = apps[slug]
            print(f"{i}: {name} ({slug}) at: '{root}'")

        if args.app:
            selected_app = args.app.lower()
            if selected_app not in apps:
                choices = ", ".join(apps.keys())
                print(f"Passed app: '{selected_app}' is not found. Specify one of the following found Apps: {choices}")
                sys.exit(0)
        else:
            selected_app = next((x for x in APP_PRIORITIES if x in apps), None)
            if not selected_app:
                selected_app = next(iter(apps))

        print(f"\nSelected app: {selected_app}")
        _, path = apps[selected_app]

    if not os.path.exists(path):
        print(f"Provided path doesn't exist: {path}")
    else:
        SCRIPT_ROOT = os.path.dirname(os.path.realpath(__file__))
        create_link(f"{SCRIPT_ROOT}/../../app", path)
        print("Success!")
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/python.sh
#!/bin/bash

# Copyright 2019-2020 NVIDIA CORPORATION
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#    http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e

PACKMAN_CMD="$(dirname "${BASH_SOURCE}")/packman"
if [ ! -f "$PACKMAN_CMD" ]; then
    PACKMAN_CMD="${PACKMAN_CMD}.sh"
fi
source "$PACKMAN_CMD" init
export PYTHONPATH="${PM_MODULE_DIR}:${PYTHONPATH}"
export PYTHONNOUSERSITE=1

# workaround for our python not shipping with certs
if [[ -z ${SSL_CERT_DIR:-} ]]; then
    export SSL_CERT_DIR=/etc/ssl/certs/
fi

"${PM_PYTHON}" -u "$@"
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/python.bat
:: Copyright 2019-2020 NVIDIA CORPORATION
::
:: Licensed under the Apache License, Version 2.0 (the "License");
:: you may not use this file except in compliance with the License.
:: You may obtain a copy of the License at
::
::    http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.

@echo off
setlocal

call "%~dp0\packman" init
set "PYTHONPATH=%PM_MODULE_DIR%;%PYTHONPATH%"
set PYTHONNOUSERSITE=1

"%PM_PYTHON%" -u %*
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/packman.cmd
:: Reset errorlevel status (don't inherit from caller) [xxxxxxxxxxx]
@call :ECHO_AND_RESET_ERROR

:: You can remove the call below if you do your own manual configuration of the dev machines
call "%~dp0\bootstrap\configure.bat"
if %errorlevel% neq 0 ( exit /b %errorlevel% )

:: Everything below is mandatory
if not defined PM_PYTHON goto :PYTHON_ENV_ERROR
if not defined PM_MODULE goto :MODULE_ENV_ERROR

:: Generate temporary path for variable file
for /f "delims=" %%a in ('powershell -ExecutionPolicy ByPass -NoLogo -NoProfile ^
    -File "%~dp0bootstrap\generate_temp_file_name.ps1"') do set PM_VAR_PATH=%%a

if %1.==. (
    set PM_VAR_PATH_ARG=
) else (
    set PM_VAR_PATH_ARG=--var-path="%PM_VAR_PATH%"
)

"%PM_PYTHON%" -S -s -u -E "%PM_MODULE%" %* %PM_VAR_PATH_ARG%
if %errorlevel% neq 0 ( exit /b %errorlevel% )

:: Marshall environment variables into the current environment if they have been generated and remove temporary file
if exist "%PM_VAR_PATH%" (
    for /F "usebackq tokens=*" %%A in ("%PM_VAR_PATH%") do set "%%A"
)
if %errorlevel% neq 0 ( goto :VAR_ERROR )

if exist "%PM_VAR_PATH%" (
    del /F "%PM_VAR_PATH%"
)
if %errorlevel% neq 0 ( goto :VAR_ERROR )

set PM_VAR_PATH=
goto :eof

:: Subroutines below
:PYTHON_ENV_ERROR
@echo User environment variable PM_PYTHON is not set! Please configure machine for packman or call configure.bat.
exit /b 1

:MODULE_ENV_ERROR
@echo User environment variable PM_MODULE is not set! Please configure machine for packman or call configure.bat.
exit /b 1

:VAR_ERROR
@echo Error while processing and setting environment variables!
exit /b 1

:ECHO_AND_RESET_ERROR
@echo off
if /I "%PM_VERBOSITY%"=="debug" (
    @echo on
)
exit /b 0
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/config.packman.xml
<config remotes="cloudfront">
  <remote2 name="cloudfront">
    <transport actions="download" protocol="https" packageLocation="d4i3qtqj3r0z5.cloudfront.net/${name}@${version}" />
  </remote2>
</config>
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/generate_temp_file_name.ps1
<#
Copyright 2019 NVIDIA CORPORATION

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#>

$out = [System.IO.Path]::GetTempFileName()
Write-Host $out

# SIG # Begin signature block
# (Authenticode signature data omitted)
# SIG # End signature block
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/configure.bat
:: Copyright 2019 NVIDIA CORPORATION
::
:: Licensed under the Apache License, Version 2.0 (the "License");
:: you may not use this file except in compliance with the License.
:: You may obtain a copy of the License at
::
::    http://www.apache.org/licenses/LICENSE-2.0
::
:: Unless required by applicable law or agreed to in writing, software
:: distributed under the License is distributed on an "AS IS" BASIS,
:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
:: See the License for the specific language governing permissions and
:: limitations under the License.

set PM_PACKMAN_VERSION=6.33.2

:: Specify where packman command is rooted
set PM_INSTALL_PATH=%~dp0..

:: The external root may already be configured and we should do minimal work in that case
if defined PM_PACKAGES_ROOT goto ENSURE_DIR

:: If the folder isn't set we assume that the best place for it is on the drive that we are currently
:: running from
set PM_DRIVE=%CD:~0,2%

set PM_PACKAGES_ROOT=%PM_DRIVE%\packman-repo

:: We use *setx* here so that the variable is persisted in the user environment
echo Setting user environment variable PM_PACKAGES_ROOT to %PM_PACKAGES_ROOT%
setx PM_PACKAGES_ROOT %PM_PACKAGES_ROOT%
if %errorlevel% neq 0 ( goto ERROR )

:: The above doesn't work properly from a build step in VisualStudio because a separate process is
:: spawned for it so it will be lost for subsequent compilation steps - VisualStudio must
:: be launched from a new process. We catch this odd-ball case here:
if defined PM_DISABLE_VS_WARNING goto ENSURE_DIR
if not defined VSLANG goto ENSURE_DIR
echo The above is a once-per-computer operation. Unfortunately VisualStudio cannot pick up environment change
echo unless *VisualStudio is RELAUNCHED*.
echo If you are launching VisualStudio from command line or command line utility make sure
echo you have a fresh launch environment (relaunch the command line or utility).
echo If you are using 'linkPath' and referring to packages via local folder links you can safely ignore this warning.
echo You can disable this warning by setting the environment variable PM_DISABLE_VS_WARNING.
echo.

:: Check for the directory that we need. Note that mkdir will create any directories
:: that may be needed in the path
:ENSURE_DIR
if not exist "%PM_PACKAGES_ROOT%" (
    echo Creating directory %PM_PACKAGES_ROOT%
    mkdir "%PM_PACKAGES_ROOT%"
)
if %errorlevel% neq 0 ( goto ERROR_MKDIR_PACKAGES_ROOT )

:: The Python interpreter may already be externally configured
if defined PM_PYTHON_EXT (
    set PM_PYTHON=%PM_PYTHON_EXT%
    goto PACKMAN
)

set PM_PYTHON_VERSION=3.7.9-windows-x86_64
set PM_PYTHON_BASE_DIR=%PM_PACKAGES_ROOT%\python
set PM_PYTHON_DIR=%PM_PYTHON_BASE_DIR%\%PM_PYTHON_VERSION%
set PM_PYTHON=%PM_PYTHON_DIR%\python.exe

if exist "%PM_PYTHON%" goto PACKMAN
if not exist "%PM_PYTHON_BASE_DIR%" call :CREATE_PYTHON_BASE_DIR

set PM_PYTHON_PACKAGE=python@%PM_PYTHON_VERSION%.cab
for /f "delims=" %%a in ('powershell -ExecutionPolicy ByPass -NoLogo -NoProfile -File "%~dp0\generate_temp_file_name.ps1"') do set TEMP_FILE_NAME=%%a
set TARGET=%TEMP_FILE_NAME%.zip
call "%~dp0fetch_file_from_packman_bootstrap.cmd" %PM_PYTHON_PACKAGE% "%TARGET%"
if %errorlevel% neq 0 (
    echo !!! Error fetching python from CDN !!!
    goto ERROR
)

for /f "delims=" %%a in ('powershell -ExecutionPolicy ByPass -NoLogo -NoProfile -File "%~dp0\generate_temp_folder.ps1" -parentPath "%PM_PYTHON_BASE_DIR%"') do set TEMP_FOLDER_NAME=%%a
echo Unpacking Python interpreter ...
"%SystemRoot%\system32\expand.exe" -F:* "%TARGET%" "%TEMP_FOLDER_NAME%" 1> nul
del "%TARGET%"
:: Failure during extraction to temp folder name, need to clean up and abort
if %errorlevel% neq 0 (
    echo !!! Error unpacking python !!!
    call :CLEAN_UP_TEMP_FOLDER
    goto ERROR
)
:: If python has now been installed by a concurrent process we need to clean up and then continue
if exist "%PM_PYTHON%" (
    call :CLEAN_UP_TEMP_FOLDER
    goto PACKMAN
) else (
    if exist "%PM_PYTHON_DIR%" ( rd /s /q "%PM_PYTHON_DIR%" > nul )
)
:: Perform atomic rename
rename "%TEMP_FOLDER_NAME%" "%PM_PYTHON_VERSION%" 1> nul
:: Failure during move, need to clean up and abort
if %errorlevel% neq 0 (
    echo !!! Error renaming python !!!
    call :CLEAN_UP_TEMP_FOLDER
    goto ERROR
)

:PACKMAN
:: The packman module may already be externally configured
if defined PM_MODULE_DIR_EXT (
    set PM_MODULE_DIR=%PM_MODULE_DIR_EXT%
) else (
    set PM_MODULE_DIR=%PM_PACKAGES_ROOT%\packman-common\%PM_PACKMAN_VERSION%
)

set PM_MODULE=%PM_MODULE_DIR%\packman.py
if exist "%PM_MODULE%" goto ENSURE_7ZA

set PM_MODULE_PACKAGE=packman-common@%PM_PACKMAN_VERSION%.zip
for /f "delims=" %%a in ('powershell -ExecutionPolicy ByPass -NoLogo -NoProfile -File "%~dp0\generate_temp_file_name.ps1"') do set TEMP_FILE_NAME=%%a
set TARGET=%TEMP_FILE_NAME%
call "%~dp0fetch_file_from_packman_bootstrap.cmd" %PM_MODULE_PACKAGE% "%TARGET%"
if %errorlevel% neq 0 (
    echo !!! Error fetching packman from CDN !!!
    goto ERROR
)

echo Unpacking ...
"%PM_PYTHON%" -S -s -u -E "%~dp0\install_package.py" "%TARGET%" "%PM_MODULE_DIR%"
if %errorlevel% neq 0 (
    echo !!! Error unpacking packman !!!
    goto ERROR
)

del "%TARGET%"

:ENSURE_7ZA
set PM_7Za_VERSION=16.02.4
set PM_7Za_PATH=%PM_PACKAGES_ROOT%\7za\%PM_7ZA_VERSION%
if exist "%PM_7Za_PATH%" goto END
set PM_7Za_PATH=%PM_PACKAGES_ROOT%\chk\7za\%PM_7ZA_VERSION%
if exist "%PM_7Za_PATH%" goto END

"%PM_PYTHON%" -S -s -u -E "%PM_MODULE%" pull "%PM_MODULE_DIR%\deps.packman.xml"
if %errorlevel% neq 0 (
    echo !!! Error fetching packman dependencies !!!
    goto ERROR
)

goto END

:ERROR_MKDIR_PACKAGES_ROOT
echo Failed to automatically create packman packages repo at %PM_PACKAGES_ROOT%.
echo Please set a location explicitly that packman has permission to write to, by issuing:
echo.
echo    setx PM_PACKAGES_ROOT {path-you-choose-for-storing-packman-packages-locally}
echo.
echo Then launch a new command console for the changes to take effect and run packman command again.
exit /B %errorlevel%

:ERROR
echo !!! Failure while configuring local machine :( !!!
exit /B %errorlevel%

:CLEAN_UP_TEMP_FOLDER
rd /S /Q "%TEMP_FOLDER_NAME%"
exit /B

:CREATE_PYTHON_BASE_DIR
:: We ignore errors and clean error state - if two processes create the directory one will fail which is fine
md "%PM_PYTHON_BASE_DIR%" > nul 2>&1
exit /B 0

:END
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/fetch_file_from_packman_bootstrap.cmd
:: Copyright 2019 NVIDIA CORPORATION :: :: Licensed under the Apache License, Version 2.0 (the "License"); :: you may not use this file except in compliance with the License. :: You may obtain a copy of the License at :: :: http://www.apache.org/licenses/LICENSE-2.0 :: :: Unless required by applicable law or agreed to in writing, software :: distributed under the License is distributed on an "AS IS" BASIS, :: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. :: See the License for the specific language governing permissions and :: limitations under the License. :: You need to specify <package-name> <target-path> as input to this command @setlocal @set PACKAGE_NAME=%1 @set TARGET_PATH=%2 @echo Fetching %PACKAGE_NAME% ... @powershell -ExecutionPolicy ByPass -NoLogo -NoProfile -File "%~dp0download_file_from_url.ps1" ^ -source "http://bootstrap.packman.nvidia.com/%PACKAGE_NAME%" -output %TARGET_PATH% :: A bug in powershell prevents the errorlevel code from being set when using the -File execution option :: We must therefore do our own failure analysis, basically make sure the file exists and is larger than 0 bytes: @if not exist %TARGET_PATH% goto ERROR_DOWNLOAD_FAILED @if %~z2==0 goto ERROR_DOWNLOAD_FAILED @endlocal @exit /b 0 :ERROR_DOWNLOAD_FAILED @echo Failed to download file from S3 @echo Most likely because endpoint cannot be reached or file %PACKAGE_NAME% doesn't exist @endlocal @exit /b 1
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/download_file_from_url.ps1
<# Copyright 2019 NVIDIA CORPORATION Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. #> param( [Parameter(Mandatory=$true)][string]$source=$null, [string]$output="out.exe" ) $filename = $output $triesLeft = 3 do { $triesLeft -= 1 try { Write-Host "Downloading from bootstrap.packman.nvidia.com ..." $wc = New-Object net.webclient $wc.Downloadfile($source, $fileName) $triesLeft = 0 } catch { Write-Host "Error downloading $source!" Write-Host $_.Exception|format-list -force } } while ($triesLeft -gt 0)
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/generate_temp_folder.ps1
<# Copyright 2019 NVIDIA CORPORATION Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. #> param( [Parameter(Mandatory=$true)][string]$parentPath=$null ) [string] $name = [System.Guid]::NewGuid() $out = Join-Path $parentPath $name New-Item -ItemType Directory -Path ($out) | Out-Null Write-Host $out # SIG # Begin signature block # MIIaVwYJKoZIhvcNAQcCoIIaSDCCGkQCAQExDzANBglghkgBZQMEAgEFADB5Bgor # BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG # KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCB29nsqMEu+VmSF # 7ckeVTPrEZ6hsXjOgPFlJm9ilgHUB6CCCiIwggTTMIIDu6ADAgECAhBi50XpIWUh # PJcfXEkK6hKlMA0GCSqGSIb3DQEBCwUAMIGEMQswCQYDVQQGEwJVUzEdMBsGA1UE # ChMUU3ltYW50ZWMgQ29ycG9yYXRpb24xHzAdBgNVBAsTFlN5bWFudGVjIFRydXN0 # IE5ldHdvcmsxNTAzBgNVBAMTLFN5bWFudGVjIENsYXNzIDMgU0hBMjU2IENvZGUg # U2lnbmluZyBDQSAtIEcyMB4XDTE4MDcwOTAwMDAwMFoXDTIxMDcwOTIzNTk1OVow # gYMxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRQwEgYDVQQHDAtT # YW50YSBDbGFyYTEbMBkGA1UECgwSTlZJRElBIENvcnBvcmF0aW9uMQ8wDQYDVQQL # DAZJVC1NSVMxGzAZBgNVBAMMEk5WSURJQSBDb3Jwb3JhdGlvbjCCASIwDQYJKoZI # hvcNAQEBBQADggEPADCCAQoCggEBALEZN63dA47T4i90jZ84CJ/aWUwVtLff8AyP # YspFfIZGdZYiMgdb8A5tBh7653y0G/LZL6CVUkgejcpvBU/Dl/52a+gSWy2qJ2bH # jMFMKCyQDhdpCAKMOUKSC9rfzm4cFeA9ct91LQCAait4LhLlZt/HF7aG+r0FgCZa # HJjJvE7KNY9G4AZXxjSt8CXS8/8NQMANqjLX1r+F+Hl8PzQ1fVx0mMsbdtaIV4Pj # 5flAeTUnz6+dCTx3vTUo8MYtkS2UBaQv7t7H2B7iwJDakEQKk1XHswJdeqG0osDU # z6+NVks7uWE1N8UIhvzbw0FEX/U2kpfyWaB/J3gMl8rVR8idPj8CAwEAAaOCAT4w # ggE6MAkGA1UdEwQCMAAwDgYDVR0PAQH/BAQDAgeAMBMGA1UdJQQMMAoGCCsGAQUF # BwMDMGEGA1UdIARaMFgwVgYGZ4EMAQQBMEwwIwYIKwYBBQUHAgEWF2h0dHBzOi8v # ZC5zeW1jYi5jb20vY3BzMCUGCCsGAQUFBwICMBkMF2h0dHBzOi8vZC5zeW1jYi5j # b20vcnBhMB8GA1UdIwQYMBaAFNTABiJJ6zlL3ZPiXKG4R3YJcgNYMCsGA1UdHwQk # MCIwIKAeoByGGmh0dHA6Ly9yYi5zeW1jYi5jb20vcmIuY3JsMFcGCCsGAQUFBwEB # BEswSTAfBggrBgEFBQcwAYYTaHR0cDovL3JiLnN5bWNkLmNvbTAmBggrBgEFBQcw # AoYaaHR0cDovL3JiLnN5bWNiLmNvbS9yYi5jcnQwDQYJKoZIhvcNAQELBQADggEB # AIJKh5vKJdhHJtMzATmc1BmXIQ3RaJONOZ5jMHn7HOkYU1JP0OIzb4pXXkH8Xwfr # K6bnd72IhcteyksvKsGpSvK0PBBwzodERTAu1Os2N+EaakxQwV/xtqDm1E3IhjHk # fRshyKKzmFk2Ci323J4lHtpWUj5Hz61b8gd72jH7xnihGi+LORJ2uRNZ3YuqMNC3 # SBC8tAyoJqEoTJirULUCXW6wX4XUm5P2sx+htPw7szGblVKbQ+PFinNGnsSEZeKz # D8jUb++1cvgTKH59Y6lm43nsJjkZU77tNqyq4ABwgQRk6lt8cS2PPwjZvTmvdnla # ZhR0K4of+pQaUQHXVIBdji8wggVHMIIEL6ADAgECAhB8GzU1SufbdOdBXxFpymuo # MA0GCSqGSIb3DQEBCwUAMIG9MQswCQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNp # Z24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNV # BAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl # IG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNhbCBSb290IENlcnRpZmlj # YXRpb24gQXV0aG9yaXR5MB4XDTE0MDcyMjAwMDAwMFoXDTI0MDcyMTIzNTk1OVow # gYQxCzAJBgNVBAYTAlVTMR0wGwYDVQQKExRTeW1hbnRlYyBDb3Jwb3JhdGlvbjEf # MB0GA1UECxMWU3ltYW50ZWMgVHJ1c3QgTmV0d29yazE1MDMGA1UEAxMsU3ltYW50 # ZWMgQ2xhc3MgMyBTSEEyNTYgQ29kZSBTaWduaW5nIENBIC0gRzIwggEiMA0GCSqG # SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDXlUPU3N9nrjn7UqS2JjEEcOm3jlsqujdp # NZWPu8Aw54bYc7vf69F2P4pWjustS/BXGE6xjaUz0wt1I9VqeSfdo9P3Dodltd6t # HPH1NbQiUa8iocFdS5B/wFlOq515qQLXHkmxO02H/sJ4q7/vUq6crwjZOeWaUT5p # 
XzAQTnFjbFjh8CAzGw90vlvLEuHbjMSAlHK79kWansElC/ujHJ7YpglwcezAR0yP # fcPeGc4+7gRyjhfT//CyBTIZTNOwHJ/+pXggQnBBsCaMbwDIOgARQXpBsKeKkQSg # mXj0d7TzYCrmbFAEtxRg/w1R9KiLhP4h2lxeffUpeU+wRHRvbXL/AgMBAAGjggF4 # MIIBdDAuBggrBgEFBQcBAQQiMCAwHgYIKwYBBQUHMAGGEmh0dHA6Ly9zLnN5bWNk # LmNvbTASBgNVHRMBAf8ECDAGAQH/AgEAMGYGA1UdIARfMF0wWwYLYIZIAYb4RQEH # FwMwTDAjBggrBgEFBQcCARYXaHR0cHM6Ly9kLnN5bWNiLmNvbS9jcHMwJQYIKwYB # BQUHAgIwGRoXaHR0cHM6Ly9kLnN5bWNiLmNvbS9ycGEwNgYDVR0fBC8wLTAroCmg # J4YlaHR0cDovL3Muc3ltY2IuY29tL3VuaXZlcnNhbC1yb290LmNybDATBgNVHSUE # DDAKBggrBgEFBQcDAzAOBgNVHQ8BAf8EBAMCAQYwKQYDVR0RBCIwIKQeMBwxGjAY # BgNVBAMTEVN5bWFudGVjUEtJLTEtNzI0MB0GA1UdDgQWBBTUwAYiSes5S92T4lyh # uEd2CXIDWDAfBgNVHSMEGDAWgBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG # 9w0BAQsFAAOCAQEAf+vKp+qLdkLrPo4gVDDjt7nc+kg+FscPRZUQzSeGo2bzAu1x # +KrCVZeRcIP5Un5SaTzJ8eCURoAYu6HUpFam8x0AkdWG80iH4MvENGggXrTL+QXt # nK9wUye56D5+UaBpcYvcUe2AOiUyn0SvbkMo0yF1u5fYi4uM/qkERgSF9xWcSxGN # xCwX/tVuf5riVpLxlrOtLfn039qJmc6yOETA90d7yiW5+ipoM5tQct6on9TNLAs0 # vYsweEDgjY4nG5BvGr4IFYFd6y/iUedRHsl4KeceZb847wFKAQkkDhbEFHnBQTc0 # 0D2RUpSd4WjvCPDiaZxnbpALGpNx1CYCw8BaIzGCD4swgg+HAgEBMIGZMIGEMQsw # CQYDVQQGEwJVUzEdMBsGA1UEChMUU3ltYW50ZWMgQ29ycG9yYXRpb24xHzAdBgNV # BAsTFlN5bWFudGVjIFRydXN0IE5ldHdvcmsxNTAzBgNVBAMTLFN5bWFudGVjIENs # YXNzIDMgU0hBMjU2IENvZGUgU2lnbmluZyBDQSAtIEcyAhBi50XpIWUhPJcfXEkK # 6hKlMA0GCWCGSAFlAwQCAQUAoHwwEAYKKwYBBAGCNwIBDDECMAAwGQYJKoZIhvcN # AQkDMQwGCisGAQQBgjcCAQQwHAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUw # LwYJKoZIhvcNAQkEMSIEIG5YDmcpqLxn4SB0H6OnuVkZRPh6OJ77eGW/6Su/uuJg # MA0GCSqGSIb3DQEBAQUABIIBAA3N2vqfA6WDgqz/7EoAKVIE5Hn7xpYDGhPvFAMV # BslVpeqE3apTcYFCEcwLtzIEc/zmpULxsX8B0SUT2VXbJN3zzQ80b+gbgpq62Zk+ # dQLOtLSiPhGW7MXLahgES6Oc2dUFaQ+wDfcelkrQaOVZkM4wwAzSapxuf/13oSIk # ZX2ewQEwTZrVYXELO02KQIKUR30s/oslGVg77ALnfK9qSS96Iwjd4MyT7PzCkHUi # ilwyGJi5a4ofiULiPSwUQNynSBqxa+JQALkHP682b5xhjoDfyG8laR234FTPtYgs # P/FaeviwENU5Pl+812NbbtRD+gKlWBZz+7FKykOT/CG8sZahgg1EMIINQAYKKwYB # BAGCNwMDATGCDTAwgg0sBgkqhkiG9w0BBwKggg0dMIINGQIBAzEPMA0GCWCGSAFl # AwQCAQUAMHcGCyqGSIb3DQEJEAEEoGgEZjBkAgEBBglghkgBhv1sBwEwMTANBglg # hkgBZQMEAgEFAAQgJhABfkDIPbI+nWYnA30FLTyaPK+W3QieT21B/vK+CMICEDF0 # worcGsdd7OxpXLP60xgYDzIwMjEwNDA4MDkxMTA5WqCCCjcwggT+MIID5qADAgEC # AhANQkrgvjqI/2BAIc4UAPDdMA0GCSqGSIb3DQEBCwUAMHIxCzAJBgNVBAYTAlVT # MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j # b20xMTAvBgNVBAMTKERpZ2lDZXJ0IFNIQTIgQXNzdXJlZCBJRCBUaW1lc3RhbXBp # bmcgQ0EwHhcNMjEwMTAxMDAwMDAwWhcNMzEwMTA2MDAwMDAwWjBIMQswCQYDVQQG # EwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xIDAeBgNVBAMTF0RpZ2lDZXJ0 # IFRpbWVzdGFtcCAyMDIxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA # wuZhhGfFivUNCKRFymNrUdc6EUK9CnV1TZS0DFC1JhD+HchvkWsMlucaXEjvROW/ # m2HNFZFiWrj/ZwucY/02aoH6KfjdK3CF3gIY83htvH35x20JPb5qdofpir34hF0e # dsnkxnZ2OlPR0dNaNo/Go+EvGzq3YdZz7E5tM4p8XUUtS7FQ5kE6N1aG3JMjjfdQ # Jehk5t3Tjy9XtYcg6w6OLNUj2vRNeEbjA4MxKUpcDDGKSoyIxfcwWvkUrxVfbENJ # Cf0mI1P2jWPoGqtbsR0wwptpgrTb/FZUvB+hh6u+elsKIC9LCcmVp42y+tZji06l # chzun3oBc/gZ1v4NSYS9AQIDAQABo4IBuDCCAbQwDgYDVR0PAQH/BAQDAgeAMAwG # A1UdEwEB/wQCMAAwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwgwQQYDVR0gBDowODA2 # BglghkgBhv1sBwEwKTAnBggrBgEFBQcCARYbaHR0cDovL3d3dy5kaWdpY2VydC5j # b20vQ1BTMB8GA1UdIwQYMBaAFPS24SAd/imu0uRhpbKiJbLIFzVuMB0GA1UdDgQW # BBQ2RIaOpLqwZr68KC0dRDbd42p6vDBxBgNVHR8EajBoMDKgMKAuhixodHRwOi8v # Y3JsMy5kaWdpY2VydC5jb20vc2hhMi1hc3N1cmVkLXRzLmNybDAyoDCgLoYsaHR0 # cDovL2NybDQuZGlnaWNlcnQuY29tL3NoYTItYXNzdXJlZC10cy5jcmwwgYUGCCsG # AQUFBwEBBHkwdzAkBggrBgEFBQcwAYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29t # ME8GCCsGAQUFBzAChkNodHRwOi8vY2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNl # 
cnRTSEEyQXNzdXJlZElEVGltZXN0YW1waW5nQ0EuY3J0MA0GCSqGSIb3DQEBCwUA # A4IBAQBIHNy16ZojvOca5yAOjmdG/UJyUXQKI0ejq5LSJcRwWb4UoOUngaVNFBUZ # B3nw0QTDhtk7vf5EAmZN7WmkD/a4cM9i6PVRSnh5Nnont/PnUp+Tp+1DnnvntN1B # Ion7h6JGA0789P63ZHdjXyNSaYOC+hpT7ZDMjaEXcw3082U5cEvznNZ6e9oMvD0y # 0BvL9WH8dQgAdryBDvjA4VzPxBFy5xtkSdgimnUVQvUtMjiB2vRgorq0Uvtc4GEk # JU+y38kpqHNDUdq9Y9YfW5v3LhtPEx33Sg1xfpe39D+E68Hjo0mh+s6nv1bPull2 # YYlffqe0jmd4+TaY4cso2luHpoovMIIFMTCCBBmgAwIBAgIQCqEl1tYyG35B5AXa # NpfCFTANBgkqhkiG9w0BAQsFADBlMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGln # aUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtE # aWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwHhcNMTYwMTA3MTIwMDAwWhcNMzEw # MTA3MTIwMDAwWjByMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5j # MRkwFwYDVQQLExB3d3cuZGlnaWNlcnQuY29tMTEwLwYDVQQDEyhEaWdpQ2VydCBT # SEEyIEFzc3VyZWQgSUQgVGltZXN0YW1waW5nIENBMIIBIjANBgkqhkiG9w0BAQEF # AAOCAQ8AMIIBCgKCAQEAvdAy7kvNj3/dqbqCmcU5VChXtiNKxA4HRTNREH3Q+X1N # aH7ntqD0jbOI5Je/YyGQmL8TvFfTw+F+CNZqFAA49y4eO+7MpvYyWf5fZT/gm+vj # RkcGGlV+Cyd+wKL1oODeIj8O/36V+/OjuiI+GKwR5PCZA207hXwJ0+5dyJoLVOOo # CXFr4M8iEA91z3FyTgqt30A6XLdR4aF5FMZNJCMwXbzsPGBqrC8HzP3w6kfZiFBe # /WZuVmEnKYmEUeaC50ZQ/ZQqLKfkdT66mA+Ef58xFNat1fJky3seBdCEGXIX8RcG # 7z3N1k3vBkL9olMqT4UdxB08r8/arBD13ays6Vb/kwIDAQABo4IBzjCCAcowHQYD # VR0OBBYEFPS24SAd/imu0uRhpbKiJbLIFzVuMB8GA1UdIwQYMBaAFEXroq/0ksuC # MS1Ri6enIZ3zbcgPMBIGA1UdEwEB/wQIMAYBAf8CAQAwDgYDVR0PAQH/BAQDAgGG # MBMGA1UdJQQMMAoGCCsGAQUFBwMIMHkGCCsGAQUFBwEBBG0wazAkBggrBgEFBQcw # AYYYaHR0cDovL29jc3AuZGlnaWNlcnQuY29tMEMGCCsGAQUFBzAChjdodHRwOi8v # Y2FjZXJ0cy5kaWdpY2VydC5jb20vRGlnaUNlcnRBc3N1cmVkSURSb290Q0EuY3J0 # MIGBBgNVHR8EejB4MDqgOKA2hjRodHRwOi8vY3JsNC5kaWdpY2VydC5jb20vRGln # aUNlcnRBc3N1cmVkSURSb290Q0EuY3JsMDqgOKA2hjRodHRwOi8vY3JsMy5kaWdp # Y2VydC5jb20vRGlnaUNlcnRBc3N1cmVkSURSb290Q0EuY3JsMFAGA1UdIARJMEcw # OAYKYIZIAYb9bAACBDAqMCgGCCsGAQUFBwIBFhxodHRwczovL3d3dy5kaWdpY2Vy # dC5jb20vQ1BTMAsGCWCGSAGG/WwHATANBgkqhkiG9w0BAQsFAAOCAQEAcZUS6VGH # VmnN793afKpjerN4zwY3QITvS4S/ys8DAv3Fp8MOIEIsr3fzKx8MIVoqtwU0HWqu # mfgnoma/Capg33akOpMP+LLR2HwZYuhegiUexLoceywh4tZbLBQ1QwRostt1AuBy # x5jWPGTlH0gQGF+JOGFNYkYkh2OMkVIsrymJ5Xgf1gsUpYDXEkdws3XVk4WTfraS # Z/tTYYmo9WuWwPRYaQ18yAGxuSh1t5ljhSKMYcp5lH5Z/IwP42+1ASa2bKXuh1Eh # 5Fhgm7oMLSttosR+u8QlK0cCCHxJrhO24XxCQijGGFbPQTS2Zl22dHv1VjMiLyI2 # skuiSpXY9aaOUjGCAk0wggJJAgEBMIGGMHIxCzAJBgNVBAYTAlVTMRUwEwYDVQQK # EwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20xMTAvBgNV # BAMTKERpZ2lDZXJ0IFNIQTIgQXNzdXJlZCBJRCBUaW1lc3RhbXBpbmcgQ0ECEA1C # SuC+Ooj/YEAhzhQA8N0wDQYJYIZIAWUDBAIBBQCggZgwGgYJKoZIhvcNAQkDMQ0G # CyqGSIb3DQEJEAEEMBwGCSqGSIb3DQEJBTEPFw0yMTA0MDgwOTExMDlaMCsGCyqG # SIb3DQEJEAIMMRwwGjAYMBYEFOHXgqjhkb7va8oWkbWqtJSmJJvzMC8GCSqGSIb3 # DQEJBDEiBCDvFxQ6lYLr8vB+9czUl19rjCw1pWhhUXw/SqOmvIa/VDANBgkqhkiG # 9w0BAQEFAASCAQB9ox2UrcUXQsBI4Uycnhl4AMpvhVXJME62tygFMppW1l7QftDy # LvfPKRYm2YUioak/APxAS6geRKpeMkLvXuQS/Jlv0kY3BjxkeG0eVjvyjF4SvXbZ # 3JCk9m7wLNE+xqOo0ICjYlIJJgRLudjWkC5Skpb1NpPS8DOaIYwRV+AWaSOUPd9P # O5yVcnbl7OpK3EAEtwDrybCVBMPn2MGhAXybIHnth3+MFp1b6Blhz3WlReQyarjq # 1f+zaFB79rg6JswXoOTJhwICBP3hO2Ua3dMAswbfl+QNXF+igKLJPYnaeSVhBbm6 # VCu2io27t4ixqvoD0RuPObNX/P3oVA38afiM # SIG # End signature block
Ekozmaster/NvidiaOmniverseRTXRemixTools/tools/packman/bootstrap/install_package.py
# Copyright 2019 NVIDIA CORPORATION # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import shutil import sys import tempfile import zipfile __author__ = "hfannar" logging.basicConfig(level=logging.WARNING, format="%(message)s") logger = logging.getLogger("install_package") class TemporaryDirectory: def __init__(self): self.path = None def __enter__(self): self.path = tempfile.mkdtemp() return self.path def __exit__(self, type, value, traceback): # Remove temporary data created shutil.rmtree(self.path) def install_package(package_src_path, package_dst_path): with zipfile.ZipFile(package_src_path, allowZip64=True) as zip_file, TemporaryDirectory() as temp_dir: zip_file.extractall(temp_dir) # Recursively copy (temp_dir will be automatically cleaned up on exit) try: # Recursive copy is needed because both package name and version folder could be missing in # target directory: shutil.copytree(temp_dir, package_dst_path) except OSError as exc: logger.warning("Directory %s already present, packaged installation aborted" % package_dst_path) else: logger.info("Package successfully installed to %s" % package_dst_path) install_package(sys.argv[1], sys.argv[2])
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/add_model.py
import os from pathlib import Path from typing import List import omni from omni.client import make_relative_url from omni.kit.window.file_importer import get_file_importer from omni.kit.window.file_exporter import get_file_exporter import omni.usd as usd from pxr import UsdGeom, Usd, Sdf from ekozerski.rtxremixtools.utils import find_inst_hash_prim, find_source_mesh_hash_prim from ekozerski.rtxremixtools.commons import log_info from ekozerski.rtxremixtools import mesh_utils class UserCache: LAST_OPENED_MODEL = None def open_export_dialog_for_captured_mesh(prim_path, mesh): def setup_references_in_stage(current_stage, reference_file_location): _, mesh_hash, __ = Usd.Prim.GetName(mesh.GetParent()).split('_') xform_prim_path = f'/RootNode/meshes/mesh_{mesh_hash}/Xform_{mesh_hash}_0' omni.kit.commands.execute('CreatePrim', prim_type='Xform', prim_path=xform_prim_path) editing_layer = current_stage.GetEditTarget().GetLayer() relative_file_path = make_relative_url(editing_layer.realPath, reference_file_location) omni.kit.commands.execute('AddReference', stage=current_stage, prim_path=Sdf.Path(xform_prim_path), reference=Sdf.Reference(relative_file_path) ) selection = omni.usd.get_context().get_selection() selection.clear_selected_prim_paths() source_layer = mesh.GetPrimStack()[-1].layer source_layer.Reload() selection.set_selected_prim_paths([xform_prim_path], False) def file_export_handler(filename: str, dirname: str, extension: str = "", selections: List[str] = []): stage = Usd.Stage.CreateInMemory() root_xform = UsdGeom.Xform.Define(stage, '/root').GetPrim() stage.SetDefaultPrim(root_xform) new_mesh = UsdGeom.Mesh.Define(stage, f'/root/{prim_path.rsplit("/", 1)[-1]}') needed_attr_names = ['doubleSided', 'extent', 'faceVertexCounts', 'faceVertexIndices', 'normals', 'points', 'primvars:st'] [ new_mesh.GetPrim().CreateAttribute(attr.GetName(), attr.GetTypeName()).Set(attr.Get()) for attr in mesh.GetAttributes() if attr.Get() and attr.GetName() in needed_attr_names ] mesh_utils.convert_mesh_to_vertex_interpolation_mode(new_mesh) ctx = usd.get_context() current_stage = ctx.get_stage() upAxis = UsdGeom.GetStageUpAxis(current_stage) UsdGeom.SetStageUpAxis(stage, upAxis) save_location = dirname + filename + extension stage.Export(save_location) setup_references_in_stage(current_stage, save_location) log_info(f"> Exporting {prim_path} in '{save_location}'") source_layer = mesh.GetPrimStack()[-1].layer rtx_remix_path_parts = source_layer.realPath.split(os.path.join("rtx-remix"), 1) if len(rtx_remix_path_parts) > 1: rtx_remix_path = os.path.join(rtx_remix_path_parts[0], "rtx-remix", "mods", "gameReadyAssets") else: rtx_remix_path = source_layer.realPath rtx_remix_path = os.path.join(rtx_remix_path, "CustomMesh") file_exporter = get_file_exporter() file_exporter.show_window( title=f'Export "{prim_path}"', export_button_label="Save", export_handler=file_export_handler, filename_url=rtx_remix_path, ) def copy_original_mesh(prim_path, mesh, output_path): stage = Usd.Stage.CreateInMemory() root_xform = UsdGeom.Xform.Define(stage, '/root').GetPrim() stage.SetDefaultPrim(root_xform) new_mesh = UsdGeom.Mesh.Define(stage, f'/root/{prim_path.rsplit("/", 1)[-1]}') needed_attr_names = ['doubleSided', 'extent', 'faceVertexCounts', 'faceVertexIndices', 'normals', 'points', 'primvars:st'] [ new_mesh.GetPrim().CreateAttribute(attr.GetName(), attr.GetTypeName()).Set(attr.Get()) for attr in mesh.GetAttributes() if attr.Get() and attr.GetName() in needed_attr_names ] 
mesh_utils.convert_mesh_to_vertex_interpolation_mode(new_mesh) ctx = usd.get_context() current_stage = ctx.get_stage() upAxis = UsdGeom.GetStageUpAxis(current_stage) UsdGeom.SetStageUpAxis(stage, upAxis) stage.Export(output_path) def setup_references_in_stage(mesh, current_stage, reference_file_location): inst_hash_prim = find_inst_hash_prim(mesh) _, mesh_hash, __ = Usd.Prim.GetName(inst_hash_prim).split('_') export_prim_name = os.path.basename(reference_file_location).split('.', 1)[0] xform_prim_path = f'/RootNode/meshes/mesh_{mesh_hash}/{export_prim_name}' omni.kit.commands.execute('CreatePrim', prim_type='Xform', prim_path=xform_prim_path) editing_layer = current_stage.GetEditTarget().GetLayer() relative_file_path = make_relative_url(editing_layer.realPath, reference_file_location) omni.kit.commands.execute('AddReference', stage=current_stage, prim_path=Sdf.Path(xform_prim_path), reference=Sdf.Reference(relative_file_path) ) source_layer = mesh.GetPrimStack()[-1].layer source_layer.Reload() selection = omni.usd.get_context().get_selection() selection.clear_selected_prim_paths() selection.set_selected_prim_paths([xform_prim_path], False) def open_export_dialog_for_captured_mesh(prim_path, mesh): def export_mesh(filename: str, dirname: str, extension: str = "", selections: List[str] = []): file_location = dirname + filename + extension copy_original_mesh(prim_path, mesh, file_location) ctx = usd.get_context() current_stage = ctx.get_stage() setup_references_in_stage(mesh, current_stage, file_location) source_layer = mesh.GetPrimStack()[-1].layer rtx_remix_path_parts = source_layer.realPath.split(os.path.join("rtx-remix"), 1) rtx_remix_path = source_layer.realPath if len(rtx_remix_path_parts) > 1: rtx_remix_path = os.path.join(rtx_remix_path_parts[0], "rtx-remix", "mods", "gameReadyAssets") rtx_remix_path = os.path.join(rtx_remix_path, "CustomMesh") file_exporter = get_file_exporter() file_exporter.show_window( title=f'Export "{prim_path}"', export_button_label="Save", export_handler=export_mesh, filename_url=rtx_remix_path, ) def open_import_dialog_for_add_models(prim_path): def import_mesh(filename: str, dirname: str, selections: List[str] = []): # TODO: Loop through all selections and add them all to the mesh_HASH with their respective xforms correctly named without collisions. 
mesh_path = mesh.GetPath().pathString new_selection = list() counter = 0 for reference_file in selections: xform_name = Path(reference_file).stem new_mesh_path = mesh_path + f'/{xform_name}_{counter}' while current_stage.GetPrimAtPath(new_mesh_path).IsValid(): counter += 1 new_mesh_path = mesh_path + f'/{xform_name}_{counter}' omni.kit.commands.execute('CreatePrim', prim_type='Xform', prim_path=new_mesh_path) editing_layer = current_stage.GetEditTarget().GetLayer() relative_file_path = make_relative_url(editing_layer.realPath, reference_file) omni.kit.commands.execute('AddReference', stage=current_stage, prim_path=Sdf.Path(new_mesh_path), reference=Sdf.Reference(relative_file_path) ) new_selection.append(new_mesh_path) UserCache.LAST_OPENED_MODEL = os.path.dirname(reference_file) counter += 1 source_layer = mesh.GetPrimStack()[-1].layer source_layer.Reload() selection = omni.usd.get_context().get_selection() selection.clear_selected_prim_paths() selection.set_selected_prim_paths(new_selection, False) ctx = usd.get_context() current_stage = ctx.get_stage() inst_prim = current_stage.GetPrimAtPath(prim_path) mesh = find_source_mesh_hash_prim(current_stage, inst_prim) source_layer = mesh.GetPrimStack()[-1].layer filename_url = UserCache.LAST_OPENED_MODEL if UserCache.LAST_OPENED_MODEL is not None else source_layer.realPath file_importer = get_file_importer() file_importer.show_window( title=f'Import Models', import_button_label="Import", import_handler=import_mesh, filename_url=filename_url, ) def open_add_model_dialog(): for path in usd.get_context().get_selection().get_selected_prim_paths(): open_import_dialog_for_add_models(path) def open_mesh_replacement_setup_dialog(): for path, mesh in mesh_utils.get_selected_mesh_prims().items(): if mesh_utils.is_a_captured_mesh(mesh): open_export_dialog_for_captured_mesh(path, mesh)
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/commons.py
import carb def log_info(msg: str): carb.log_info(f"[RTX Remix Tool] {msg}") def log_warn(msg: str): carb.log_warn(f"[RTX Remix Tool] {msg}") def log_error(msg: str): carb.log_error(f"[RTX Remix Tool] {msg}")
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/extension.py
import omni.ext import omni.ui as ui from omni.kit import context_menu from omni.kit.hotkeys.core import get_hotkey_registry from omni.kit.actions.core import get_action_registry from . import commons from .rtx_context_menu import build_rtx_remix_menu # Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be # instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled # on_shutdown() is called. class RtxRemixTools(omni.ext.IExt): def on_startup(self, ext_id): self.ext_id = ext_id commons.log_info(f"Starting Up") menu = {"name": "RTX Remix", "populate_fn": build_rtx_remix_menu} self._context_menu_subscription = context_menu.add_menu(menu, "MENU", "") self.hotkey_registry = get_hotkey_registry() register_actions(self.ext_id) self.select_source_mesh_hotkey = self.hotkey_registry.register_hotkey( self.ext_id, "SHIFT + F", self.ext_id, "select_source_mesh", filter=None, ) def on_shutdown(self): commons.log_info(f"Shutting Down") # remove event self._context_menu_subscription.release() self.hotkey_registry.deregister_hotkey( self.select_source_mesh_hotkey, ) deregister_actions(self.ext_id) def register_actions(extension_id): from . import select_source_mesh action_registry = get_action_registry() actions_tag = "RTX Remix Tools Actions" action_registry.register_action( extension_id, "select_source_mesh", select_source_mesh.select_source_meshes, display_name="Select Source Mesh", description="Selects the corresponding mesh_HASH the prim is related to.", tag=actions_tag, ) def deregister_actions(extension_id): action_registry = get_action_registry() action_registry.deregister_all_actions_for_extension(extension_id)
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/__init__.py
from .extension import *
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/mesh_utils.py
from collections import OrderedDict import os from pxr import UsdGeom, Usd, Sdf import omni.usd as usd from ekozerski.rtxremixtools.commons import log_error def get_selected_mesh_prims(): ctx = usd.get_context() current_stage = ctx.get_stage() selection = ctx.get_selection().get_selected_prim_paths() selected_prims = { path: current_stage.GetPrimAtPath(path) for path in selection } meshes = { prim_path: prim for prim_path, prim in selected_prims.items() if UsdGeom.Mesh(prim) } return meshes def convert_mesh_to_vertex_interpolation_mode(mesh): """ This method attemps to convert Remix meshes' interpolation mode from constant or faceVarying to vertex. If there is any faceVarying attribute, it means the data arrays (points, uvs, normals...) will have different lengths, so this script will copy data around using the faceVertexIndices array to ensure they all end up with the same length. """ # TODO: Study interpolation modes in depth to implement a decent conversion script. prim = mesh.GetPrim() primvar_api = UsdGeom.PrimvarsAPI(prim) primvars = {var for var in primvar_api.GetPrimvars()} face_varying_primvars = [v for v in primvars if v.GetInterpolation() == UsdGeom.Tokens.faceVarying] if face_varying_primvars or mesh.GetNormalsInterpolation() == UsdGeom.Tokens.faceVarying: non_face_varying_primvars = list(primvars.difference(face_varying_primvars)) non_face_varying_primvars = [var for var in non_face_varying_primvars if var.GetInterpolation() != 'uniform'] indices = prim.GetAttribute("faceVertexIndices") # Settings points separately since it doesn't have a "SetInterpolation" like primvars. points = prim.GetAttribute("points") points_arr = points.Get() new_arr = [points_arr[i] for i in indices.Get()] points.Set(new_arr) for var in non_face_varying_primvars: original_arr = var.Get() if original_arr: new_arr = [original_arr[i] for i in indices.Get()] var.Set(new_arr) indices.Set([i for i in range(len(indices.Get()))]) [var.SetInterpolation(UsdGeom.Tokens.vertex) for var in primvars] mesh.SetNormalsInterpolation(UsdGeom.Tokens.vertex) def convert_uv_primvars_to_st(mesh): # https://github.com/NVIDIAGameWorks/dxvk-remix/blob/ebb0ecfd638d6a32ab5f10708b5b07bc763cf79b/src/dxvk/rtx_render/rtx_mod_usd.cpp#L696 # https://github.com/Kim2091/RTXRemixTools/blob/8ae25224ef8d1d284f3e208f671b2ce6a35b82af/RemixMeshConvert/For%20USD%20Composer/RemixMeshConvert_OV.py#L4 known_uv_names = [ 'primvars:st', 'primvars:uv', 'primvars:st0', 'primvars:st1', 'primvars:st2', 'primvars:UVMap', 'primvars:UVChannel_1', 'primvars:map1', ] # Preserving the order of found primvars to use the first one, in case a primvars:st can't be found. primvar_api = UsdGeom.PrimvarsAPI(mesh) uv_primvars = OrderedDict( (primvar.GetName(), primvar) for primvar in primvar_api.GetPrimvars() if primvar.GetTypeName().role == 'TextureCoordinate' or primvar.GetName() in known_uv_names ) if not uv_primvars: return # Picking only one UV and blowing up everything else as the runtime only reads the first anyway. 
considered_uv = uv_primvars.get('primvars:st') or next(iter(uv_primvars.values())) uv_data = considered_uv.Get() [primvar_api.RemovePrimvar(uv_name) for uv_name in uv_primvars.keys()] # Recreating the primvar with appropriate name, type and role new_uv_primvar = primvar_api.CreatePrimvar('primvars:st', Sdf.ValueTypeNames.TexCoord2fArray, UsdGeom.Tokens.vertex) new_uv_primvar.Set(uv_data) def remove_unused_primvars(mesh): unused_primvar_names = [ 'primvars:displayColor', 'primvars:displayOpacity', ] primvar_api = UsdGeom.PrimvarsAPI(mesh) [primvar_api.RemovePrimvar(uv_name) for uv_name in unused_primvar_names] def fix_meshes_in_file(usd_file_path): stage = Usd.Stage.Open(usd_file_path) mesh_prims = [prim for prim in stage.TraverseAll() if UsdGeom.Mesh(prim)] for prim in mesh_prims: faceVertices = prim.GetAttribute("faceVertexCounts").Get() if not faceVertices or not all({x == 3 for x in faceVertices}): log_error(f"Mesh {prim.GetPath()} in '{usd_file_path}' hasn't been triangulated and this tools doesn't do that for you yet :(") continue convert_mesh_to_vertex_interpolation_mode(UsdGeom.Mesh(prim)) convert_uv_primvars_to_st(UsdGeom.Mesh(prim)) remove_unused_primvars(UsdGeom.Mesh(prim)) stage.Save() def is_a_captured_mesh(mesh): """ Returns True if the Mesh's defining USD file is located in the captures folder. """ return os.path.normpath("captures/meshes") in os.path.normpath(mesh.GetPrimStack()[-1].layer.realPath) def fix_meshes_geometry(): meshes = {k: v for k,v in get_selected_mesh_prims().items() if not is_a_captured_mesh(v)} for path, mesh in meshes.items(): source_layer = mesh.GetPrimStack()[-1].layer fix_meshes_in_file(source_layer.realPath) source_layer.Reload()
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/add_material.py
import os from typing import List from omni import usd, kit from omni.kit.window.file_importer import get_file_importer from omni.client import make_relative_url from ekozerski.rtxremixtools.utils import find_source_mesh_hash_prim def open_add_material_dialog_for_prim(mesh_hash, ctx, current_stage): def create_material_from_mdl_file(filename: str, dirname: str, selections: List[str] = []): if not filename.endswith('mdl'): raise ValueError(f"The selected file '{filename}' doesn't have a mdl extension.") mesh_hash_path = mesh_hash.GetPath().pathString counter = 0 material_name = os.path.basename(filename).replace('.mdl', '') new_material_path = mesh_hash_path + f'/{material_name}_{counter}' while current_stage.GetPrimAtPath(new_material_path).IsValid(): counter += 1 new_material_path = mesh_hash_path + f'/{material_name}_{counter}' # TODO: Get material name by inspecting the MDL file rather than guessing from it's name, so users can # rename it at will. mtl_name = 'AperturePBR_Opacity' if 'Opacity' in filename else 'AperturePBR_Translucent' editing_layer = current_stage.GetEditTarget().GetLayer() relative_file_path = make_relative_url(editing_layer.realPath, os.path.join(dirname, filename)) success, _ = kit.commands.execute('CreateMdlMaterialPrimCommand', mtl_url=relative_file_path, mtl_name=mtl_name, mtl_path=new_material_path, select_new_prim=True, ) def filter_handler(filename: str, _, extension_option): if extension_option == '.mdl': return filename.lower().endswith('.mdl') return True file_importer = get_file_importer() file_importer.show_window( title=f'Select MDL File', import_button_label="Select", import_handler=create_material_from_mdl_file, file_extension_types=[(".mdl", "Opacity or Translucent MDL file")], file_filter_handler=filter_handler, ) def open_add_material_dialog(): ctx = usd.get_context() current_stage = ctx.get_stage() selection = ctx.get_selection().get_selected_prim_paths() selected_prims = { path: current_stage.GetPrimAtPath(path) for path in selection } source_meshes = [find_source_mesh_hash_prim(current_stage, prim) for prim in selected_prims.values()] source_meshes = set([mesh for mesh in source_meshes if mesh is not None]) for mesh_hash in list(source_meshes): open_add_material_dialog_for_prim(mesh_hash, ctx, current_stage)
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/utils.py
from pxr import Usd from omni import usd def find_source_mesh_hash_prim(current_stage, prim): if not current_stage.GetPrimAtPath('/RootNode/meshes'): return prim search_prim = prim valid_paths = ['/RootNode/meshes', '/RootNode/instances'] while search_prim.GetParent().IsValid() and search_prim.GetParent().GetPath().pathString not in valid_paths: search_prim = search_prim.GetParent() if not search_prim: return None if 'mesh_' in Usd.Prim.GetName(search_prim): return search_prim _, mesh_hash, __ = Usd.Prim.GetName(search_prim).split('_') mesh_prim_path = f'/RootNode/meshes/mesh_{mesh_hash}' return current_stage.GetPrimAtPath(mesh_prim_path) def find_inst_hash_prim(instance_mesh): search_prim = instance_mesh root_path = '/RootNode/instances' while search_prim.GetParent().IsValid() and search_prim.GetParent().GetPath().pathString != root_path: search_prim = search_prim.GetParent() if not search_prim: return None return search_prim
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/preserve_draw_calls.py
from omni import usd, kit from pxr import Sdf from ekozerski.rtxremixtools.utils import find_source_mesh_hash_prim def set_preserve_original_draw_call(enabled: bool = False): ctx = usd.get_context() current_stage = ctx.get_stage() selection = ctx.get_selection().get_selected_prim_paths() selected_prims = { path: current_stage.GetPrimAtPath(path) for path in selection } source_meshes = [find_source_mesh_hash_prim(current_stage, prim) for prim in selected_prims.values()] source_meshes = set([mesh for mesh in source_meshes if mesh is not None]) for mesh_prim in source_meshes: kit.commands.execute( 'CreateUsdAttributeCommand', prim=mesh_prim, attr_name='preserveOriginalDrawCall', attr_type=Sdf.ValueTypeNames.Int, attr_value=1 if enabled else 0 )
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/rtx_context_menu.py
from omni.kit.ui import get_custom_glyph_code from omni import usd import omni.ui as ui from . import mesh_utils from . import add_model from . import add_material from . import preserve_draw_calls from . import select_source_mesh def _build_fix_mesh_geometry_menu_item(): tooltip = ''.join([ 'Interpolation Mode\n', 'OBS: Operation Can\'t be undone\n', ' RTX Remix runtime only supports "vertex" interpolation mode, in which "points", "normals" and "uvs" arrays ', 'must have the same length, but DCC tools usually export the mesh using "faceVarying" interpolation mode.', 'This operation reorganizes the geometry to be compatible with the runtime. See:\n', ' "Interpolation of Geometric Primitive Variables" - https://openusd.org/dev/api/class_usd_geom_primvar.html', '\n\nThis operation only applies for meshes inside the mods folder, not the captured ones.', ]) ui.MenuItem( "Fix Meshes Geometry", triggered_fn=mesh_utils.fix_meshes_geometry, enabled=any([ not mesh_utils.is_a_captured_mesh(mesh) for mesh in mesh_utils.get_selected_mesh_prims().values() ]), tooltip=tooltip ) def _build_setup_for_mesh_replacements_menu_item(): tooltip = ''.join([ "Export the original mesh to a selected location and setup the references to work within the runtime so you", " can focus on remodeling the mesh and export back at the same location." ]) ui.MenuItem( "Setup for Mesh Replacement", triggered_fn=add_model.open_mesh_replacement_setup_dialog, enabled=any([ mesh_utils.is_a_captured_mesh(mesh) for mesh in mesh_utils.get_selected_mesh_prims().values() ]), tooltip=tooltip ) def _build_add_model_menu_item(): tooltip = ''.join([ "Add external authored meshes to the prim, setting up properly to work within the runtime." ]) ui.MenuItem( "Add Model", triggered_fn=add_model.open_add_model_dialog, tooltip=tooltip, enabled=bool(usd.get_context().get_selection().get_selected_prim_paths()) ) def _build_add_material_menu_item(): tooltip = ''.join([ "Add a material defined from an external MDL file to the selected prim." ]) ui.MenuItem( "Add Material", triggered_fn=add_material.open_add_material_dialog, tooltip=tooltip, enabled=bool(usd.get_context().get_selection().get_selected_prim_paths()) ) def _build_preserve_original_draw_call_menu_item(): tooltip = ''.join([ "Add a 'custom int preserveOriginalDrawCall' attribute set to '1' to the mesh_HASH prim. Used to indicate to", " the runtime whether it should keep rendering the original mesh or not. Should be set when adding custom ", " lights without removing the original mesh from rendering." ]) ui.MenuItem( "Preserve", triggered_fn=lambda: preserve_draw_calls.set_preserve_original_draw_call(True), tooltip=tooltip, enabled=bool(usd.get_context().get_selection().get_selected_prim_paths()) ) def _build_dont_preserve_original_draw_call_menu_item(): tooltip = ''.join([ "Add a 'custom int preserveOriginalDrawCall' attribute set to '0' to the mesh_HASH prim. Used to indicate to", " the runtime whether it should keep rendering the original mesh or not. Should be set when adding custom ", " lights without removing the original mesh from rendering." ]) ui.MenuItem( "Don't Preserve", triggered_fn=lambda: preserve_draw_calls.set_preserve_original_draw_call(False), tooltip=tooltip, enabled=bool(usd.get_context().get_selection().get_selected_prim_paths()) ) def _build_select_source_meshes_menu(): tooltip = ''.join([ "Selects the corresponding mesh_HASH the prim is related to." 
]) ui.MenuItem( "Select Source Mesh (Shift + F)", triggered_fn=select_source_mesh.select_source_meshes, tooltip=tooltip, enabled=bool(usd.get_context().get_selection().get_selected_prim_paths()) ) def build_rtx_remix_menu(event): icon = get_custom_glyph_code("${glyphs}/menu_create.svg") with ui.Menu(f' {icon} RTX Remix'): _build_fix_mesh_geometry_menu_item() _build_setup_for_mesh_replacements_menu_item() _build_add_model_menu_item() _build_add_material_menu_item() with ui.Menu(f'Original Draw Call Preservation'): _build_preserve_original_draw_call_menu_item() _build_dont_preserve_original_draw_call_menu_item() _build_select_source_meshes_menu()
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/select_source_mesh.py
from omni import usd from ekozerski.rtxremixtools.utils import find_source_mesh_hash_prim def select_source_meshes(): ctx = usd.get_context() current_stage = ctx.get_stage() selection = ctx.get_selection().get_selected_prim_paths() selected_prims = { path: current_stage.GetPrimAtPath(path) for path in selection } source_meshes = [find_source_mesh_hash_prim(current_stage, prim) for prim in selected_prims.values()] source_meshes = set([mesh for mesh in source_meshes if mesh is not None]) paths = [mesh.GetPath().pathString for mesh in source_meshes] selection = usd.get_context().get_selection() selection.clear_selected_prim_paths() selection.set_selected_prim_paths(paths, False)
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/tests/__init__.py
from .test_hello_world import *
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/ekozerski/rtxremixtools/tests/test_hello_world.py
# NOTE:
#   omni.kit.test - std python's unittest module with additional wrapping to add support for async/await tests
#   For most things refer to unittest docs: https://docs.python.org/3/library/unittest.html
import omni.kit.test

# Extension for writing UI tests (simulate UI interaction)
import omni.kit.ui_test as ui_test

# Import extension python module we are testing with absolute import path, as if we are external user (other extension)
import ekozerski.rtxremixtools


# Having a test class derived from omni.kit.test.AsyncTestCase declared at the root of the module will make it auto-discoverable by omni.kit.test
class Test(omni.kit.test.AsyncTestCase):
    # Before running each test
    async def setUp(self):
        pass

    # After running each test
    async def tearDown(self):
        pass

    # Actual test, notice it is an "async" function, so "await" can be used if needed
    @omni.kit.test.omni_test_registry(guid="f898a949-bacc-41f5-be56-b4eb8923f54e")
    async def test_hello_public_function(self):
        result = ekozerski.rtxremixtools.some_public_function(4)
        self.assertEqual(result, 256)

    @omni.kit.test.omni_test_registry(guid="4626d574-659f-4a85-8958-9fa8588fbce3")
    async def test_window_button(self):
        # Find a label in our window
        label = ui_test.find("My Window//Frame/**/Label[*]")

        # Find buttons in our window
        add_button = ui_test.find("My Window//Frame/**/Button[*].text=='Add'")
        reset_button = ui_test.find("My Window//Frame/**/Button[*].text=='Reset'")

        # Click reset button
        await reset_button.click()
        self.assertEqual(label.widget.text, "empty")

        await add_button.click()
        self.assertEqual(label.widget.text, "count: 1")

        await add_button.click()
        self.assertEqual(label.widget.text, "count: 2")
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/config/extension.toml
[core] reloadable = true [package] # Semantic Versioning is used: https://semver.org/ version = "0.0.2" # Lists people or organizations that are considered the "authors" of the package. authors = ["Emanuel Kozerski"] # The title and description fields are primarily for displaying extension info in UI title = "RTX Remix Tools" description="Simple toolkit for creating remixing assets compatible with RTX Remix runtime" # Path (relative to the root) or content of readme markdown file for UI. readme = "docs/README.md" # URL of the extension source repository. repository = "https://github.com/Ekozmaster/Nvidia-Omniverse-RTX-Remix-Tools" # One of categories for UI. category = "Other" # Keywords for the extension keywords = ["Tool", "Toolkit", "Tools", "RTX", "Remix"] # Location of change log file in target (final) folder of extension, relative to the root. # More info on writing changelog: https://keepachangelog.com/en/1.0.0/ changelog="docs/CHANGELOG.md" # Preview image and icon. Folder named "data" automatically goes in git lfs (see .gitattributes file). # Preview image is shown in "Overview" of Extensions window. Screenshot of an extension might be a good preview image. preview_image = "data/preview.png" # Icon is shown in Extensions window, it is recommended to be square, of size 256x256. icon = "data/icon.png" # Use omni.ui to build simple UI [dependencies] "omni.kit.uiapp" = {} # Main python module this extension provides, it will be publicly available as "import ekozerski.rtxremixtools". [[python.module]] name = "ekozerski.rtxremixtools" [[test]] # Extra dependencies only to be used during test run dependencies = [ "omni.kit.ui_test" # UI testing extension ]
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/docs/CHANGELOG.md
# Changelog

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [0.0.3] - 2023-12-22
- "Add Model", "Add Material" and "Fix Mesh Geometry" now also work when not in a capture scene.
- Fixed some errors when using the "Fix Mesh Geometry" option on some meshes.
- Added "Shift + F" hotkey to "Select Source Mesh".
- Fixed error when using "Setup for Mesh Replacement" on captures which nest original game meshes inside a "ref" Xform.
- Added conversion of many "primvar:*" name variations for UV-related primvars to "primvars:st" while discarding extra UV maps.
- Removed unused primvars "displayColor" and "displayOpacity".
- Xforms from added models and materials are now named according to the imported file rather than Xform_HASH_x.

## [0.0.2] - 2023-08-28
- Fixed relative paths being converted to absolute by the "Fix Meshes Geometry" function.
- Picking the best UV map available among all primvars and discarding everything else in "Fix Meshes Geometry".
- Removing unused primvars when using "Fix Meshes Geometry".
- A few more bugfixes.

## [0.0.1] - 2023-08-25
- Initial version.
- Added "Fix Meshes Geometry" option converting the interpolation mode to "vertex".
- Added "Setup for Mesh Replacement" option to export the original mesh for remodeling in external DCC tools.
- Added "Add Model" option to add externally authored .USD models to the mesh_HASH prim.
- Added "Add Material" option to add MDL materials to the mesh_HASH prim.
- Added "Original Draw Call Preservation" submenu to set the preserveOriginalDrawCall attribute.
- Added "Select Source Mesh" option to quickly select the mesh_HASH prim.
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/docs/README.md
# RTX Remix Tools [ekozerski.rtxremixtools]

Focusing on improving RTX Remix modding workflows, this extension is designed to speed up iteration when producing assets and mods by providing useful UI operations inside Omniverse apps like USD Composer/Create or Code. It provides some options in the "Right click" context menu to set up ideal replacement assets, as well as some conversion operations to ensure assets will be compatible with the Remix runtime.

It is primarily designed to operate on Remix captured scenes, so users get instant feedback on what their mods are going to look like in the game scenes and can iterate faster.

## Available Tools
### Fix Meshes Geometry
<i>(Operation is performed on every mesh of a USD/USDA source file and can't be undone)</i>

Interpolation Mode
- The RTX Remix runtime only supports meshes with "vertex" interpolation mode, in which the "points", "normals" and "uvs" arrays must have the same length, but DCC tools usually export the mesh using "faceVarying" interpolation mode. This operation reorganizes the geometry to be compatible with the runtime.
- See: "Interpolation of Geometric Primitive Variables" - https://openusd.org/dev/api/class_usd_geom_primvar.html
- This operation only applies to meshes inside the mods folder, not the captured ones.

UV Maps
- The runtime supports a single UV map per mesh, which should have one of a few known names, so this script finds many variations, picks one and renames it to the standard "primvars:st", while also setting the appropriate type, "TextureCoordinate" (TexCoord2fArray / TexCoord2f[]). The other UV maps are discarded.

Unused Primvars
- displayColor and displayOpacity are removed from the mesh.

### Setup for Mesh Replacement
Exports the selected mesh to a selected path, already setting up the replacements and references to work in the runtime, so for every change the user only needs to:
- Open the exported mesh in their DCC of choice, make the changes and export again (with the right settings: triangulated faces, no materials, etc.)
- Back in OV, refresh the reference to see the changes in the captured scene.
- Use "Fix Meshes Geometry" again to make it Remix-compatible.
- Enjoy.

The original mesh is kept in case the user only wants to add more models. Make sure to delete it if the intention is to completely replace the original mesh.

### Add Model
If the user already has authored USD models, this option allows selecting multiple models and adding them to the mesh_HASH prim.

### Add Material
This option allows selecting a material .MDL file (AperturePBR_Opacity.mdl or AperturePBR_Translucent.mdl) to add a material prim to the mesh_HASH prim.

### Original Draw Call Preservation
Allows setting the "custom int preserveOriginalDrawCall" attribute to indicate whether the runtime should be forced to render the original mesh or not. It must be set to 1 when placing custom lights, or else the original mesh disappears. PS: Remember to set this to 0 if you want to make a mesh replacement and remove the original mesh.

### Select Source Mesh
Quick way to select the original source mesh_HASH prim in the scene when you have an instance prim selected.

<br>

## Things to Keep in mind
- In a capture scene, any changes made to the "inst_SOMEHASH_x" prims won't show up in the runtime, so every change must be made in the "mesh_SOMEHASH" they're referencing.
Whenever the user clicks an inst_ prim to perform an action like fixing geometry or adding a model (e.g. right clicking in the 3D viewport), this tool will try to find the referenced mesh_SOMEHASH and perform the operations on it instead.
- With that in mind, always keep an eye on the "Layers" tab to check whether you have made any changes under the "instances" path. Try to delete those changes whenever possible.
- The only material types that work in the runtime are the ones described in AperturePBR_Opacity.MDL and AperturePBR_Translucent.MDL, and every mesh must be triangulated. If you want to add a model you got from somewhere else, like an asset store, make sure to convert the assets to work in the runtime.
- When placing lights in the scene, it is necessary to set an int "preserveOriginalDrawCall" to "1" in order to keep rendering the original mesh (see the scripting example below). If another layer is setting this flag somewhere and you want to replace/remove the original mesh in your own layer, you will notice that the original mesh can't be removed without setting this flag back to "0". You can override it in your own layer and set it back to "0", but make sure your layer sits above the one that sets it to "1".
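## Scripting Example

As a reference, the snippet below is a minimal sketch of what the "Original Draw Call Preservation" option does under the hood (it mirrors the extension's `preserve_draw_calls.py`). The mesh path used here is a hypothetical example; point it at the mesh_HASH prim you care about.

```python
import omni.usd
import omni.kit.commands
from pxr import Sdf

# Hypothetical example path: the mesh_HASH prim whose original draw call you want to preserve.
mesh_prim_path = "/RootNode/meshes/mesh_0AB123CD456EF789"

stage = omni.usd.get_context().get_stage()
mesh_prim = stage.GetPrimAtPath(mesh_prim_path)

# Same command the extension issues: creates the custom int attribute
# "preserveOriginalDrawCall" and sets it to 1 (preserve) or 0 (don't preserve).
omni.kit.commands.execute(
    "CreateUsdAttributeCommand",
    prim=mesh_prim,
    attr_name="preserveOriginalDrawCall",
    attr_type=Sdf.ValueTypeNames.Int,
    attr_value=1,
)
```

Running this from the Script Editor has the same effect as using the context menu's "Preserve" option with the corresponding prim selected.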
Ekozmaster/NvidiaOmniverseRTXRemixTools/exts/ekozerski.rtxremixtools/docs/index.rst
ekozerski.rtxremixtools
#############################

Example of a Python-only extension

.. toctree::
   :maxdepth: 1

   README
   CHANGELOG


.. automodule:: ekozerski.rtxremixtools
    :platform: Windows-x86_64, Linux-x86_64
    :members:
    :undoc-members:
    :show-inheritance:
    :imported-members:
    :exclude-members: contextmanager
rcervellione-nv/omni.rhinocompute/CONTRIBUTING.md
## Contribution Rules #### Issue Tracking * All enhancement, bugfix, or change requests must begin with the creation of a [TensorRT Issue Request](https://github.com/nvidia/TensorRT/issues). * The issue request must be reviewed by TensorRT engineers and approved prior to code review. #### Coding Guidelines - All source code contributions must strictly adhere to the [TensorRT Coding Guidelines](CODING-GUIDELINES.md). - In addition, please follow the existing conventions in the relevant file, submodule, module, and project when you add new code or when you extend/fix existing functionality. - To maintain consistency in code formatting and style, you should also run `clang-format` on the modified sources with the provided configuration file. This applies TensorRT code formatting rules to: - class, function/method, and variable/field naming - comment style - indentation - line length - Format git changes: ```bash # Commit ID is optional - if unspecified, run format on staged changes. git-clang-format --style file [commit ID/reference] ``` - Format individual source files: ```bash # -style=file : Obtain the formatting rules from .clang-format # -i : In-place modification of the processed file clang-format -style=file -i -fallback-style=none <file(s) to process> ``` - Format entire codebase (for project maintainers only): ```bash find samples plugin -iname *.h -o -iname *.c -o -iname *.cpp -o -iname *.hpp \ | xargs clang-format -style=file -i -fallback-style=none ``` - Avoid introducing unnecessary complexity into existing code so that maintainability and readability are preserved. - Try to keep pull requests (PRs) as concise as possible: - Avoid committing commented-out code. - Wherever possible, each PR should address a single concern. If there are several otherwise-unrelated things that should be fixed to reach a desired endpoint, our recommendation is to open several PRs and indicate the dependencies in the description. The more complex the changes are in a single PR, the more time it will take to review those changes. - Write commit titles using imperative mood and [these rules](https://chris.beams.io/posts/git-commit/), and reference the Issue number corresponding to the PR. Following is the recommended format for commit texts: ``` #<Issue Number> - <Commit Title> <Commit Body> ``` - Ensure that the build log is clean, meaning no warnings or errors should be present. - Ensure that all `sample_*` tests pass prior to submitting your code. - All OSS components must contain accompanying documentation (READMEs) describing the functionality, dependencies, and known issues. - See `README.md` for existing samples and plugins for reference. - All OSS components must have an accompanying test. - If introducing a new component, such as a plugin, provide a test sample to verify the functionality. - To add or disable functionality: - Add a CMake option with a default value that matches the existing behavior. - Where entire files can be included/excluded based on the value of this option, selectively include/exclude the relevant files from compilation by modifying `CMakeLists.txt` rather than using `#if` guards around the entire body of each file. - Where the functionality involves minor changes to existing files, use `#if` guards. - Make sure that you can contribute your work to open source (no license and/or patent conflict is introduced by your code). You will need to [`sign`](#signing-your-work) your commit. 
- Thanks in advance for your patience as we review your contributions; we do appreciate them!

#### Pull Requests
Developer workflow for code contributions is as follows:

1. Developers must first [fork](https://help.github.com/en/articles/fork-a-repo) the [upstream](https://github.com/nvidia/TensorRT) TensorRT OSS repository.

2. Git clone the forked repository and push changes to the personal fork.

```bash
git clone https://github.com/YOUR_USERNAME/YOUR_FORK.git TensorRT
# Checkout the targeted branch and commit changes
# Push the commits to a branch on the fork (remote).
git push -u origin <local-branch>:<remote-branch>
```

3. Once the code changes are staged on the fork and ready for review, a [Pull Request](https://help.github.com/en/articles/about-pull-requests) (PR) can be [requested](https://help.github.com/en/articles/creating-a-pull-request) to merge the changes from a branch of the fork into a selected branch of upstream.
  * Exercise caution when selecting the source and target branches for the PR. Note that versioned releases of TensorRT OSS are posted to `release/` branches of the upstream repo.
  * Creation of a PR kicks off the code review process.
  * At least one TensorRT engineer will be assigned to the review.
  * While under review, mark your PRs as work-in-progress by prefixing the PR title with [WIP].

4. Since there is no CI/CD process in place yet, the PR will be accepted and the corresponding issue closed only after adequate testing has been completed, manually, by the developer and/or TensorRT engineer reviewing the code.

#### Signing Your Work

* We require that all contributors "sign-off" on their commits. This certifies that the contribution is your original work, or you have rights to submit it under the same license, or a compatible license.

  * Any contribution which contains commits that are not Signed-Off will not be accepted.

* To sign off on a commit you simply use the `--signoff` (or `-s`) option when committing your changes:

  ```bash
  $ git commit -s -m "Add cool feature."
  ```

  This will append the following to your commit message:

  ```
  Signed-off-by: Your Name <[email protected]>
  ```

* Full text of the DCO:

  ```
  Developer Certificate of Origin
  Version 1.1

  Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
  1 Letterman Drive
  Suite D4700
  San Francisco, CA, 94129

  Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.
  ```

  ```
  Developer's Certificate of Origin 1.1

  By making a contribution to this project, I certify that:

  (a) The contribution was created in whole or in part by me and I have the right to submit it under the open source license indicated in the file; or

  (b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the same open source license (unless I am permitted to submit under a different license), as indicated in the file; or

  (c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it.

  (d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved.
  ```
rcervellione-nv/omni.rhinocompute/README.md
# About
This is an extension designed to run in an NVIDIA Omniverse application such as Create or Machinima. The extension creates a link to a Rhino.Compute server [https://developer.rhino3d.com/guides/compute/], allowing you to run Rhino commands such as Quad Remesh, or to execute Grasshopper files.

This is designed to be a sample to extend: there are examples of using some basic Rhino commands, like Volume and Quad Remesh, as well as running a Grasshopper script. Use this as a starting point to integrate your Grasshopper scripts and functions directly into Omniverse and create the necessary UI elements (a minimal scripting example is included at the end of this readme).

![Rhino Compute Image 01](exts/cerver.util.rhinocompute/data/CreateAndCompute.png "Rhino Compute and Create")

# Using It

- "app" - is a folder link to the location of your *Omniverse Kit* based app.
- "exts" - is the folder you add to the extension search path (Extension Manager -> Gear Icon -> Extension Search Path).

Open this folder using Visual Studio Code. It will suggest you install a few extensions that will improve the Python experience.

Look for the "cerver.util.rhinocompute" extension in the Extension Manager inside Omniverse Create and enable it. Try applying changes to any Python files: the extension will hot-reload and you can observe the results immediately.

The first time you enable it, it will take some time to load. This is because all of the required packages from Rhino and Rhino.Compute are installed into your Omniverse Python library via an automatic pip install.

# 3rd party Libraries
This project references 3rd party libraries with the following licensing

Rhino.compute
https://github.com/mcneel/compute.rhino3d/blob/master/LICENSE

Rhino3dm
https://github.com/mcneel/rhino3dm/blob/main/LICENSE

Plotly
https://github.com/plotly/plotly.py/blob/master/LICENSE.txt
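# Example: Calling Rhino.Compute from Python

For orientation, here is a minimal sketch of the pattern the extension uses to talk to a Rhino.Compute server (see `RhinoComputeFunctions.py`). The server URL matches the extension's default; the tiny single-triangle mesh is just a stand-in to exercise the call and is not part of the extension itself.

```python
import omni.kit.pipapi

# The extension pip-installs these packages into the Omniverse Python environment on first load.
omni.kit.pipapi.install("rhino3dm")
omni.kit.pipapi.install("compute-rhino3d")

import rhino3dm
import compute_rhino3d.Util
import compute_rhino3d.Mesh

# Point the client at your Rhino.Compute server (the extension defaults to localhost:6500).
compute_rhino3d.Util.url = "http://localhost:6500/"

# Build a throwaway single-triangle mesh; in the extension, meshes come from the
# current USD selection via its convertSelectedUsdMeshToRhino() helper instead.
mesh = rhino3dm.Mesh()
mesh.Vertices.Add(0.0, 0.0, 0.0)
mesh.Vertices.Add(1.0, 0.0, 0.0)
mesh.Vertices.Add(0.0, 1.0, 0.0)
mesh.Faces.AddFace(0, 1, 2)

# Ask the compute server for the mesh volume (an open triangle reports ~0, but the round trip works).
volume = compute_rhino3d.Mesh.Volume(mesh)
print("Volume reported by Rhino.Compute:", volume)
```

The same pattern - set `compute_rhino3d.Util.url`, convert geometry, call a `compute_rhino3d.*` function, convert the result back to USD - underlies all of the buttons in the "Rhino Compute Functions" window.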
rcervellione-nv/omni.rhinocompute/exts/cerver.util.rhinocompute/cerver/util/rhinocompute/extension.py
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved. # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. import omni.ext import omni.ui as ui import omni.usd from .RhinoComputeFunctions import RhinoFunctions, GrasshopperFunctions from .RhinoComputUtil import SaveSelectedAs3dm # Any class derived from `omni.ext.IExt` in top level module (defined in `python.modules` of `extension.toml`) will be # instantiated when extension gets enabled and `on_startup(ext_id)` will be called. Later when extension gets disabled # on_shutdown() is called. class MyExtension(omni.ext.IExt): # ext_id is current extension id. It can be used with extension manager to query additional information, like where # this extension is located on filesystem. def __init__(self): self.computeUrl="http://localhost:6500/" self.progressbarprog = 0 self.progbarwindow = None self.excludeLastGroupAsLayer = False def on_startup(self, ext_id): #print("[omni.RhinoCompute] MyExtension startup") def serverAddrChanged(addr): self.computeUrl = addr self._window = ui.Window("Rhino Compute Functions", width=300, height=400) with self._window.frame: with ui.VStack(): ui.Label("Compute Server Address") serverAddrUi = ui.StringField(height = 30) serverAddrUi.model.set_value(self.computeUrl) serverAddrUi.model.add_value_changed_fn(lambda m:serverAddrChanged(m.get_value_as_string())) with ui.CollapsableFrame("Util Functions", height = 0): with ui.VStack(): ui.Button("save sel as 3dm", clicked_fn=lambda: SaveSelectedAs3dm(self,"S:/test.3dm"), height=40) ui.Button("save all as 3dm", clicked_fn=lambda: RhinoFunctions.SaveAllAs3DM_UI(self), height=40) with ui.CollapsableFrame("Mesh Functions", height = 0): with ui.VStack(): ui.Button("Volume", clicked_fn=lambda: RhinoFunctions.MeshVolume(self), height=40) ui.Button("Mesh Bool Union", clicked_fn=lambda: RhinoFunctions.MeshBoolUnion(self), height=40) ui.Button("Quad Remesh", clicked_fn=lambda: RhinoFunctions.MeshQuadRemesh(self), height=40) ui.Button("Mesh Offset", clicked_fn=lambda: RhinoFunctions.MeshOffset(self), height=40) with ui.CollapsableFrame("Grasshopper Functions", height = 0): with ui.VStack(): ui.Button("Random Diamonds Script", clicked_fn=lambda: GrasshopperFunctions.randomDiamonds_UI(self), height=40) def on_shutdown(self): print("[omni.RhinoCompute] MyExtension shutdown")
rcervellione-nv/omni.rhinocompute/exts/cerver.util.rhinocompute/cerver/util/rhinocompute/__init__.py
from .extension import *
rcervellione-nv/omni.rhinocompute/exts/cerver.util.rhinocompute/cerver/util/rhinocompute/RhinoComputeFunctions.py
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved. # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. import string import omni.ext import omni.ui as ui from pxr import Usd, UsdGeom import omni.usd import carb.events import omni.kit.app import os import json import time omni.kit.pipapi.install("rhino3dm") from rhino3dm import * omni.kit.pipapi.install("compute-rhino3d") import compute_rhino3d.Util import compute_rhino3d.Mesh import compute_rhino3d.Grasshopper as gh from .RhinoComputUtil import * omni.kit.pipapi.install("plotly==5.4.0") import plotly.graph_objects as go class RhinoFunctions: def ComputeServerUrl(self): return self.computeUrl def MeshVolume(self): #add the compute server location compute_rhino3d.Util.url = self.computeUrl #convert selected items to rhino mesh meshes = convertSelectedUsdMeshToRhino() vols = [] names = [] rhinoMeshes = [] #for each mesh compute the volume and then add the volume and name to a list for m in meshes: rhinoMeshes.append(m["Mesh"]) vol = compute_rhino3d.Mesh.Volume(m["Mesh"]) vols.append(vol) names.append(m["Name"]) #use plotly to plot the volumes as a pie chart fig = go.Figure( data=[go.Pie(values=vols, labels=names)], layout_title_text="the Volumes" ) fig.show() def MeshBoolUnion(self) -> None: #add the compute server location compute_rhino3d.Util.url = self.computeUrl #convert selected items to rhino mesh meshes = convertSelectedUsdMeshToRhino() #for each mesh compute the bool union rhinoMeshes = [] for m in meshes: rhinoMeshes.append(m["Mesh"]) rhinoMeshes = compute_rhino3d.Mesh.CreateBooleanUnion(rhinoMeshes) #add to the stage after converting back from rhino to USD mesh #ToDo: add UI to define prim path and names ct=0 for rm in rhinoMeshes: RhinoMeshToUsdMesh("/World/rhinoComputed/",f"BoolUnion_{ct}",rm) def MeshQuadRemesh(self)-> None: compute_rhino3d.Util.url = self.computeUrl meshes = convertSelectedUsdMeshToRhino() #setup all the params for quad remesh #ToDo: make this a UI for user parameters = { 'AdaptiveQuadCount': True, 'AdaptiveSize': 50.0, 'DetectHardEdges': True, 'GuideCurveInfluence': 0, 'PreserveMeshArrayEdgesMode': 0, 'TargetQuadCount': 2000 } names = [] rhinoMeshes = [] for m in meshes: weldVerts = compute_rhino3d.Mesh.Weld(m["Mesh"],0.5) qrm =compute_rhino3d.Mesh.QuadRemesh(weldVerts,parameters) name = m["Name"] if qrm is not None: rhinoMeshes.append(qrm) names.append(name) RhinoMeshToUsdMesh("/World/rhinoComputed/",name+"_QuadRemesh",qrm) else: warning(f"QuadRemesh Failed on {name}") def MeshWeld(self, tol)-> None: compute_rhino3d.Util.url = self.computeUrl meshes = convertSelectedUsdMeshToRhino() names = [] rhinoMeshes = [] for m in meshes: weldVerts = compute_rhino3d.Mesh.Weld(m["Mesh"],tol) name = m["Name"] if weldVerts is not None: rhinoMeshes.append(weldVerts) names.append(name) RhinoMeshToUsdMesh("/World/rhinoComputed/",name+"_Weld",weldVerts) else: warning(f"Weld Failed on {name}") def MeshOffset(self)-> None: compute_rhino3d.Util.url = self.computeUrl meshes = convertSelectedUsdMeshToRhino() names = [] rhinoMeshes = [] for m in meshes: macf = compute_rhino3d.Mesh.Offset1(m["Mesh"],1,True) rhinoMeshes.append(macf) name = m["Name"] names.append(name) 
RhinoMeshToUsdMesh("/World/rhinoComputed/",name+"_offset",macf) def SaveAllAs3DM_UI(self): window_flags = ui.WINDOW_FLAGS_NO_SCROLLBAR #window_flags |= ui.WINDOW_FLAGS_NO_TITLE_BAR self.export3dmwindow = ui.Window("Export Stage As 3DM", width=300, height=130, flags=window_flags) with self.export3dmwindow.frame: with ui.VStack(): with ui.HStack(): ui.Label("Path", width=50, height = 25) path = ui.StringField( height = 25, tooltip = "Set the location and name of the file i.e c:/temp/myRhinofile.3dm") with ui.HStack( height = 35): def exLastGrpAsLayCb_changed(self, val): self.excludeLastGroupAsLayer = val print(val) exLastGrpAsLayCb = ui.CheckBox(width = 30) exLastGrpAsLayCb.model.add_value_changed_fn(lambda cb: exLastGrpAsLayCb_changed(self,cb.get_value_as_bool() ) ) ui.Label("Exlude last group as layer", width=50, height = 15) def exportbt(): SaveAllas3DM(self,path.model.get_value_as_string()) ui.Line() ui.Button("Export", clicked_fn=lambda: exportbt(), height=25) class GrasshopperFunctions: def randomDiamonds(self,uCt,vCt,rrA,rrB): compute_rhino3d.Util.url = self.computeUrl ghFile = os.path.dirname(os.path.dirname(__file__)) + "/rhinocompute/gh/randomDiamonds.ghx" selectedMeshes = convertSelectedUsdMeshToRhino() inputMesh = selectedMeshes[0]["Mesh"] # create list of input trees ghMesh = json.dumps(inputMesh.Encode()) mesh_tree = gh.DataTree("baseMesh") mesh_tree.Append([0], [ghMesh]) srfU_tree = gh.DataTree("srfU") srfU_tree.Append([0], [uCt]) srfV_tree = gh.DataTree("srfV") srfV_tree.Append([0], [vCt]) rrA_tree = gh.DataTree("RR_A") rrA_tree.Append([0], [rrA]) rrB_tree = gh.DataTree("RR_B") rrB_tree.Append([0], [rrB]) inputs = [mesh_tree, srfU_tree, srfV_tree, rrA_tree, rrB_tree] results = gh.EvaluateDefinition(ghFile, inputs) # decode results data = results['values'][0]['InnerTree']['{0}'] outMeshes = [rhino3dm.CommonObject.Decode(json.loads(item['data'])) for item in data] ct = 0 for m in outMeshes: RhinoMeshToUsdMesh("/World",f"/randomDiamonds/randomDiamonds_{ct}",m) ct+=1 def randomDiamonds_UI(self): def run(uCt,vCt,rrA,rrB): GrasshopperFunctions.randomDiamonds(self,uCt, vCt, rrA,rrB) #window_flags = ui.WINDOW_FLAGS_NO_RESIZE sliderStyle = {"border_radius":15, "background_color": 0xFFDDDDDD, "secondary_color":0xFFAAAAAA, "color":0xFF111111, "margin":3} window_flags = ui.WINDOW_FLAGS_NO_SCROLLBAR self.theWindow = ui.Window("Random Diamonds", width=300, height=200, flags=window_flags) with self.theWindow.frame: with ui.VStack(): with ui.HStack(): ui.Label("U Ct", width=40) srfU = ui.IntSlider(height= 20, min=1, max=50, style= sliderStyle ) with ui.HStack(): ui.Label("V Ct", width=40) srfV = ui.IntSlider(height= 20, min=1, max=50, style= sliderStyle ) with ui.HStack(): ui.Label("min D", width=40) rrA = ui.FloatSlider(height= 20, min=0.1, max=150, style= sliderStyle ) with ui.HStack(): ui.Label("max D", width=40) rrB = ui.FloatSlider(height= 20, min=0.1, max=150, style= sliderStyle ) srfU.model.set_value(4) srfV.model.set_value(4) rrA.model.set_value(4) rrB.model.set_value(75) srfU.model.add_value_changed_fn(lambda m:run(srfU.model.get_value_as_int(),srfV.model.get_value_as_int(),rrA.model.get_value_as_float(),rrB.model.get_value_as_float())) srfV.model.add_value_changed_fn(lambda m:run(srfU.model.get_value_as_int(),srfV.model.get_value_as_int(),rrA.model.get_value_as_float(),rrB.model.get_value_as_float())) rrA.model.add_value_changed_fn(lambda m:run(srfU.model.get_value_as_int(),srfV.model.get_value_as_int(),rrA.model.get_value_as_float(),rrB.model.get_value_as_float())) 
rrB.model.add_value_changed_fn(lambda m:run(srfU.model.get_value_as_int(),srfV.model.get_value_as_int(),rrA.model.get_value_as_float(),rrB.model.get_value_as_float())) ui.Line(height=10) ui.Button("Run >>", clicked_fn=lambda: GrasshopperFunctions.randomDiamonds(self, srfU.model.get_value_as_int(), srfV.model.get_value_as_int(), rrA.model.get_value_as_float(), rrB.model.get_value_as_float(), ), height=30)
rcervellione-nv/omni.rhinocompute/exts/cerver.util.rhinocompute/cerver/util/rhinocompute/RhinoComputUtil.py
# Copyright (c) 2022 NVIDIA CORPORATION. All rights reserved. # NVIDIA CORPORATION and its licensors retain all intellectual property # and proprietary rights in and to this software, related documentation # and any modifications thereto. Any use, reproduction, disclosure or # distribution of this software and related documentation without an express # license agreement from NVIDIA CORPORATION is strictly prohibited. import compute_rhino3d.Util import compute_rhino3d.Mesh import compute_rhino3d.Grasshopper as gh import rhino3dm import json import omni.ext import omni.ui as ui from pxr import Usd, UsdGeom, Gf import omni.usd import asyncio def convertSelectedUsdMeshToRhino(): context = omni.usd.get_context() stage = omni.usd.get_context().get_stage() prims = [stage.GetPrimAtPath(m) for m in context.get_selection().get_selected_prim_paths() ] #filter out prims that are not mesh selected_prims = [ prim for prim in prims if UsdGeom.Mesh(prim)] #setup var to hold the mesh, its name in the dict sDict = [] #add the converted prims to the dict for m in selected_prims: sDict.append({"Name": m.GetName(), "Mesh":UsdMeshToRhinoMesh(m)}) return sDict def UsdMeshToRhinoMesh(usdMesh): #array for the mesh items vertices = [] faces = [] #get the USD points points = UsdGeom.Mesh(usdMesh).GetPointsAttr().Get() #setup the items needed to deal with world and local transforms xform_cache = UsdGeom.XformCache() mtrx_world = xform_cache.GetLocalToWorldTransform(usdMesh) #create the rhino mesh mesh = rhino3dm.Mesh() #convert the USD points to rhino points for p in points: world_p = mtrx_world.Transform(p) mesh.Vertices.Add(world_p[0],world_p[1],world_p[2]) #faces we can extend directly into the aray becaue they are just ints faces.extend( UsdGeom.Mesh(usdMesh).GetFaceVertexIndicesAttr().Get()) faceCount = UsdGeom.Mesh(usdMesh).GetFaceVertexCountsAttr().Get() ct = 0 #add the face verts, USD uses a flat list of ints so we need to deal with #3 or 4 sided faces. 
USD supports ngons but that is not accounted for #ToDo: Deal with ngons for i in range(0,len(faceCount)): fc=faceCount[i] if fc is 3: mesh.Faces.AddFace(faces[ct], faces[ct+1], faces[ct+2]) if fc is 4: mesh.Faces.AddFace(faces[ct], faces[ct+1], faces[ct+2], faces[ct+3]) ct+=fc #compute normals, i dont use the USD normals here but you could mesh.Normals.ComputeNormals() mesh.Compact() return mesh def save_stage(): stage = omni.usd.get_context().get_stage() stage.GetRootLayer().Save() omni.client.usd_live_process() def RhinoMeshToUsdMesh( rootUrl, meshName, rhinoMesh: rhino3dm.Mesh , primPath=None): #get the stage stage = omni.usd.get_context().get_stage() # Create the geometry inside of "Root" meshPrimPath = rootUrl + meshName mesh = UsdGeom.Mesh.Define(stage, meshPrimPath) # Add all of the vertices points = [] for i in range(0,len(rhinoMesh.Vertices)): v = rhinoMesh.Vertices[i] points.append(Gf.Vec3f(v.X, v.Y, v.Z)) mesh.CreatePointsAttr(points) # Calculate indices for each triangle faceIndices = [] faceVertexCounts = [] for i in range(0, rhinoMesh.Faces.Count): fcount=3 curf = rhinoMesh.Faces[i] faceIndices.append(curf[0]) faceIndices.append(curf[1]) faceIndices.append(curf[2]) if curf[2] != curf[3]: faceIndices.append(curf[3]) fcount=4 #print(f"{fcount} : {curf}") faceVertexCounts.append(fcount) mesh.CreateFaceVertexIndicesAttr(faceIndices) mesh.CreateFaceVertexCountsAttr(faceVertexCounts) # Add vertex normals meshNormals = [] for n in rhinoMesh.Normals: meshNormals.append(Gf.Vec3f(n.X,n.Y,n.Z)) mesh.CreateNormalsAttr(meshNormals) def SaveRhinoFile(rhinoMeshes, path): model = rhino3dm.File3dm() [ model.Objects.AddMesh(m) for m in rhinoMeshes] model.Write(path) def SaveSelectedAs3dm(self,path): selectedMeshes = convertSelectedUsdMeshToRhino() meshobj = [d['Mesh'] for d in selectedMeshes] SaveRhinoFile(meshobj, path) def SaveAllas3DM(self, path): #get the stage stage = omni.usd.get_context().get_stage() #get all prims that are meshes meshPrims = [stage.GetPrimAtPath(prim.GetPath()) for prim in stage.Traverse() if UsdGeom.Mesh(prim)] #make a rhino file rhinoFile = rhino3dm.File3dm() uniqLayers = {} #figure out how many elements there are (to implament progress bar in future) numPrims = len(meshPrims) curPrim = 0 #loop over all the meshes for mp in meshPrims: #convert from usd mesh to rhino mesh rhinoMesh = UsdMeshToRhinoMesh(mp) objName = mp.GetName() rhinoAttr = rhino3dm.ObjectAttributes() dataOnParent = False #get the properties on the prim bimProps = None parentPrim = mp.GetParent() #see if this prim has BIM properties (from revit) if parentPrim: bimProps = mp.GetPropertiesInNamespace("BIM") dataOnParent = False #see if this prims parent has BIM properties (from revit) if not bimProps: bimProps = parentPrim.GetPropertiesInNamespace("BIM") dataOnParent = True #if no bim properties just add regular ones if not bimProps : bimProps = mp.GetProperties() dataOnParent = False for p in bimProps: try: pName = p.GetBaseName() var = p.Get() rhinoAttr.SetUserString(pName, str(var)) except Exception : pass # get the prims path and use that to create nested layers in rhino primpath = str(mp.GetPath()) sepPrimPath = primpath.split('/') sepPrimPath.pop(0) sepPrimPath.pop() # this will ajust the layer structure if the data is from the revit connector # or if you just want to prune the last group in the export dialogue if dataOnParent or self.excludeLastGroupAsLayer: sepPrimPath.pop() nestedLayerName = '::'.join(sepPrimPath) ct=0 curLayer = "" #loop over all the prim paths to created the nested layers in 
rhino for pp in sepPrimPath: if ct == 0: curLayer += pp else: curLayer += f"::{pp}" #check if the layer exists, if not make it if not curLayer in uniqLayers : layer = rhino3dm.Layer() if ct>0: prevLayer = curLayer.split('::') prevLayer.pop() prevLayer = '::'.join(prevLayer) layer.ParentLayerId = rhinoFile.Layers.FindIndex(uniqLayers[prevLayer]).Id layer.Color = (255,255,255,255) layer.Name = pp idx = rhinoFile.Layers.Add(layer) uniqLayers[curLayer]= int(idx) ct+=1 rhinoAttr.Name = objName #print(str(uniqLayers[nestedLayerName])) rhinoAttr.LayerIndex = int(str(uniqLayers[nestedLayerName])) #add the mesh and its attributes to teh rhino file rhinoFile.Objects.AddMesh(rhinoMesh, rhinoAttr) curPrim += 1 self.progressbarprog = curPrim/numPrims #save it all rhinoFile.Write(path) print("completed saving")
rcervellione-nv/omni.rhinocompute/exts/cerver.util.rhinocompute/config/extension.toml
[package]
# Semantic Versioning is used: https://semver.org/
version = "1.0.3"

# The title and description fields are primarily for displaying extension info in UI
title = "Rhino Compute for Omniverse"
description="Omniverse integration with a Rhino.Compute server"

# Path (relative to the root) or content of readme markdown file for UI.
readme = "../../README.md"

# URL of the extension source repository.
repository = "https://github.com/rcervellione-nv/omni.rhinocompute"

# One of categories for UI.
category = "Utility"

# Keywords for the extension
keywords = ["kit", "Rhino", "Compute"]

# Icon to show in the extension manager
icon = "data/computeTerminal.png"

# Preview to show in the extension manager
preview_image = "data/CreateAndCompute.png"

# Use omni.ui to build simple UI
[dependencies]
"omni.kit.uiapp" = {}

# Main python module this extension provides, it will be publicly available as "import cerver.util.rhinocompute".
[[python.module]]
name = "cerver.util.rhinocompute"
vinjn/llm-metahuman/README.md
# LLM MetaHuman

LLM MetaHuman is an open solution for AI-powered photorealistic digital humans.

## Preparation steps

- Install [Omniverse Launcher](https://www.nvidia.com/en-us/omniverse/download/)
- Inside Omniverse Launcher, install `Audio2Face`.
- Install [Epic Games Store](https://store.epicgames.com/en-US/)
- Inside Epic Games Store, install Unreal Engine 5.x.
- Follow [Audio2Face to UE Live Link Plugin](https://docs.omniverse.nvidia.com/audio2face/latest/user-manual/livelink-ue-plugin.html) to connect Audio2Face to Unreal Engine.

## Launch Audio2Face headless

## Launch llm.py

Once llm.py is running, it can also be driven from another process; a minimal sketch is shown at the end of this README.

## Launch Unreal Engine Metahuman
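## Example: chatting with a running llm.py from Python

This is a minimal sketch, assuming llm.py has been started with its default settings (Gradio server on `localhost:7860`, which are the defaults of its `--gradio_host` / `--gradio_port` arguments). It uses the same `gradio_client` call that llm.py itself uses when two instances talk to each other; the message text is only an example.

```python
from gradio_client import Client

# Connect to the Gradio app served by llm.py (host/port are the assumed defaults).
client = Client("http://localhost:7860/")

# Send one chat message; llm.py streams the LLM answer to Audio2Face while
# returning the text reply here.
reply = client.predict("tell me 3 jokes", api_name="/chat")
print(reply)
```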
vinjn/llm-metahuman/tools/kit-log.bat
start "" "%userprofile%\.nvidia-omniverse\logs\Kit\kit"
vinjn/llm-metahuman/tools/code-log.bat
start "" "%userprofile%\.nvidia-omniverse\logs\Kit\Code"
vinjn/llm-metahuman/tools/create-log.bat
start "" "%userprofile%\.nvidia-omniverse\logs\Kit\Create.Next"
vinjn/llm-metahuman/tools/kill-kit.bat
taskkill /IM kit.exe /F
vinjn/llm-metahuman/audio-client/run.bat
@REM pip install protobuf==3.17.3 grpcio soundfile python test_client.py %1 /World/audio2face/PlayerStreaming /World/audio2gesture/PlayerStreaming
vinjn/llm-metahuman/audio-client/audio2face_pb2_grpc.py
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" import grpc import audio2face_pb2 as audio2face__pb2 class Audio2FaceStub(object): """Missing associated documentation comment in .proto file.""" def __init__(self, channel): """Constructor. Args: channel: A grpc.Channel. """ self.PushAudio = channel.unary_unary( '/nvidia.audio2face.Audio2Face/PushAudio', request_serializer=audio2face__pb2.PushAudioRequest.SerializeToString, response_deserializer=audio2face__pb2.PushAudioResponse.FromString, ) self.PushAudioStream = channel.stream_unary( '/nvidia.audio2face.Audio2Face/PushAudioStream', request_serializer=audio2face__pb2.PushAudioStreamRequest.SerializeToString, response_deserializer=audio2face__pb2.PushAudioStreamResponse.FromString, ) class Audio2FaceServicer(object): """Missing associated documentation comment in .proto file.""" def PushAudio(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def PushAudioStream(self, request_iterator, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def add_Audio2FaceServicer_to_server(servicer, server): rpc_method_handlers = { 'PushAudio': grpc.unary_unary_rpc_method_handler( servicer.PushAudio, request_deserializer=audio2face__pb2.PushAudioRequest.FromString, response_serializer=audio2face__pb2.PushAudioResponse.SerializeToString, ), 'PushAudioStream': grpc.stream_unary_rpc_method_handler( servicer.PushAudioStream, request_deserializer=audio2face__pb2.PushAudioStreamRequest.FromString, response_serializer=audio2face__pb2.PushAudioStreamResponse.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( 'nvidia.audio2face.Audio2Face', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) # This class is part of an EXPERIMENTAL API. class Audio2Face(object): """Missing associated documentation comment in .proto file.""" @staticmethod def PushAudio(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.unary_unary(request, target, '/nvidia.audio2face.Audio2Face/PushAudio', audio2face__pb2.PushAudioRequest.SerializeToString, audio2face__pb2.PushAudioResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) @staticmethod def PushAudioStream(request_iterator, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): return grpc.experimental.stream_unary(request_iterator, target, '/nvidia.audio2face.Audio2Face/PushAudioStream', audio2face__pb2.PushAudioStreamRequest.SerializeToString, audio2face__pb2.PushAudioStreamResponse.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
vinjn/llm-metahuman/audio-client/audio2face_pb2.py
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: audio2face.proto """Generated protocol buffer code.""" from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x61udio2face.proto\x12\x11nvidia.audio2face\"{\n\x10PushAudioRequest\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x12\n\nsamplerate\x18\x02 \x01(\x05\x12\x12\n\naudio_data\x18\x03 \x01(\x0c\x12(\n block_until_playback_is_finished\x18\x04 \x01(\x08\"5\n\x11PushAudioResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t\"\x85\x01\n\x16PushAudioStreamRequest\x12@\n\x0cstart_marker\x18\x01 \x01(\x0b\x32(.nvidia.audio2face.PushAudioRequestStartH\x00\x12\x14\n\naudio_data\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request\"l\n\x15PushAudioRequestStart\x12\x15\n\rinstance_name\x18\x01 \x01(\t\x12\x12\n\nsamplerate\x18\x02 \x01(\x05\x12(\n block_until_playback_is_finished\x18\x03 \x01(\x08\";\n\x17PushAudioStreamResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\x12\x0f\n\x07message\x18\x02 \x01(\t2\xd4\x01\n\nAudio2Face\x12X\n\tPushAudio\x12#.nvidia.audio2face.PushAudioRequest\x1a$.nvidia.audio2face.PushAudioResponse\"\x00\x12l\n\x0fPushAudioStream\x12).nvidia.audio2face.PushAudioStreamRequest\x1a*.nvidia.audio2face.PushAudioStreamResponse\"\x00(\x01\x62\x06proto3') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'audio2face_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _PUSHAUDIOREQUEST._serialized_start=39 _PUSHAUDIOREQUEST._serialized_end=162 _PUSHAUDIORESPONSE._serialized_start=164 _PUSHAUDIORESPONSE._serialized_end=217 _PUSHAUDIOSTREAMREQUEST._serialized_start=220 _PUSHAUDIOSTREAMREQUEST._serialized_end=353 _PUSHAUDIOREQUESTSTART._serialized_start=355 _PUSHAUDIOREQUESTSTART._serialized_end=463 _PUSHAUDIOSTREAMRESPONSE._serialized_start=465 _PUSHAUDIOSTREAMRESPONSE._serialized_end=524 _AUDIO2FACE._serialized_start=527 _AUDIO2FACE._serialized_end=739 # @@protoc_insertion_point(module_scope)
vinjn/llm-metahuman/audio-client/gen_protoc.py
import os import subprocess ROOT_DIR = os.path.dirname(os.path.abspath(__file__)) proto_src_root = os.path.normpath(os.path.join(ROOT_DIR, "proto/")) proto_dst_root = os.path.normpath(os.path.join(ROOT_DIR, ".")) proto_fpath = os.path.normpath(os.path.join(ROOT_DIR, "proto", "audio2face.proto")) cmd = [ "python", "-m", "grpc_tools.protoc", "-I", f"{proto_src_root}", f"--python_out={proto_dst_root}", f"--grpc_python_out={proto_dst_root}", f"{proto_fpath}", ] print(cmd) subprocess.call(cmd)
vinjn/llm-metahuman/audio-client/test_client.py
""" This demo script shows how to send audio data to Audio2Face Streaming Audio Player via gRPC requests. There are two options: * Send the whole track at once using PushAudioRequest() * Send the audio chunks seuqntially in a stream using PushAudioStreamRequest() For the second option this script emulates the stream of chunks, generated by splitting an input WAV audio file. But in a real application such stream of chunks may be aquired from some other streaming source: * streaming audio via internet, streaming Text-To-Speech, etc gRPC protocol details could be find in audio2face.proto """ import sys import time import audio2face_pb2 import audio2face_pb2_grpc import grpc import numpy as np import soundfile def push_audio_track(url, audio_data, samplerate, instance_names): """ This function pushes the whole audio track at once via PushAudioRequest() PushAudioRequest parameters: * audio_data: bytes, containing audio data for the whole track, where each sample is encoded as 4 bytes (float32) * samplerate: sampling rate for the audio data * instance_names: prim path of the Audio2Face Streaming Audio Player on the stage, were to push the audio data * block_until_playback_is_finished: if True, the gRPC request will be blocked until the playback of the pushed track is finished The request is passed to PushAudio() """ block_until_playback_is_finished = True # ADJUST for instance_name in instance_names: with grpc.insecure_channel(url) as channel: stub = audio2face_pb2_grpc.Audio2FaceStub(channel) request = audio2face_pb2.PushAudioRequest() request.audio_data = audio_data.astype(np.float32).tobytes() request.samplerate = samplerate request.instance_name = instance_name request.block_until_playback_is_finished = block_until_playback_is_finished print("Sending audio data...") response = stub.PushAudio(request) if response.success: print("SUCCESS") else: print(f"ERROR: {response.message}") print("Closed channel") def push_audio_track_stream(url, audio_data, samplerate, instance_names): """ This function pushes audio chunks sequentially via PushAudioStreamRequest() The function emulates the stream of chunks, generated by splitting input audio track. But in a real application such stream of chunks may be aquired from some other streaming source. 
The first message must contain start_marker field, containing only meta information (without audio data): * samplerate: sampling rate for the audio data * instance_names: prim path of the Audio2Face Streaming Audio Player on the stage, were to push the audio data * block_until_playback_is_finished: if True, the gRPC request will be blocked until the playback of the pushed track is finished (after the last message) Second and other messages must contain audio_data field: * audio_data: bytes, containing audio data for an audio chunk, where each sample is encoded as 4 bytes (float32) All messages are packed into a Python generator and passed to PushAudioStream() """ chunk_size = samplerate // 10 # ADJUST sleep_between_chunks = 0.04 # ADJUST block_until_playback_is_finished = True # ADJUST with grpc.insecure_channel(url) as channel: print("Channel creadted") stub = audio2face_pb2_grpc.Audio2FaceStub(channel) for instance_name in instance_names: def make_generator(): start_marker = audio2face_pb2.PushAudioRequestStart( samplerate=samplerate, instance_name=instance_name, block_until_playback_is_finished=block_until_playback_is_finished, ) # At first, we send a message with start_marker yield audio2face_pb2.PushAudioStreamRequest(start_marker=start_marker) # Then we send messages with audio_data for i in range(len(audio_data) // chunk_size + 1): time.sleep(sleep_between_chunks) chunk = audio_data[i * chunk_size : i * chunk_size + chunk_size] yield audio2face_pb2.PushAudioStreamRequest(audio_data=chunk.astype(np.float32).tobytes()) request_generator = make_generator() print("Sending audio data...") response = stub.PushAudioStream(request_generator) if response.success: print("SUCCESS") else: print(f"ERROR: {response.message}") print("Channel closed") def main(): """ This demo script shows how to send audio data to Audio2Face Streaming Audio Player via gRPC requests. There two options: * Send the whole track at once using PushAudioRequest() * Send the audio chunks seuqntially in a stream using PushAudioStreamRequest() For the second option this script emulates the stream of chunks, generated by splitting an input WAV audio file. But in a real application such stream of chunks may be aquired from some other streaming source: * streaming audio via internet, streaming Text-To-Speech, etc gRPC protocol details could be find in audio2face.proto """ if len(sys.argv) < 3: print("Format: python test_client.py PATH_TO_WAV INSTANCE_NAME") return # Sleep time emulates long latency of the request sleep_time = 0.0 # ADJUST # URL of the Audio2Face Streaming Audio Player server (where A2F App is running) url = "localhost:50051" # ADJUST # Local input WAV file path audio_fpath = sys.argv[1] # Prim path of the Audio2Face Streaming Audio Player on the stage (were to push the audio data) instance_names = sys.argv[2:] data, samplerate = soundfile.read(audio_fpath, dtype="float32") # Only Mono audio is supported if len(data.shape) > 1: data = np.average(data, axis=1) print(f"Sleeping for {sleep_time} seconds") time.sleep(sleep_time) if 0: # ADJUST # Push the whole audio track at once push_audio_track(url, data, samplerate, instance_names) else: # Emulate audio stream and push audio chunks sequentially push_audio_track_stream(url, data, samplerate, instance_names) if __name__ == "__main__": main()
vinjn/llm-metahuman/audio-client/llm.py
from openai import OpenAI from pydub import AudioSegment import gradio as gr import requests import os from litellm import completion import time import threading import queue import gradio_client as gc # XXX: increase requests speed # https://stackoverflow.com/a/72440253 requests.packages.urllib3.util.connection.HAS_IPV6 = False args = None CWD = os.getcwd() print("CWD:", CWD) VOICE_ACTORS = ["nova", "alloy", "echo", "fable", "onyx", "shimmer"] def timing_decorator(func): def wrapper(*args, **kwargs): start_time = time.time() result = func(*args, **kwargs) end_time = time.time() elapsed_time = end_time - start_time print(f"{func.__name__} cost: {elapsed_time:.2f} seconds.") return result return wrapper class A2fInstance: files_to_delete = [] instaces = [] def __init__(self, index) -> None: self.SERVICE_HEALTHY = False self.LIVELINK_SERVICE_HEALTHY = False self.index = index @timing_decorator def post(self, end_point, data=None, verbose=True): if not self.SERVICE_HEALTHY: return None if verbose: print(f"++ {end_point}") api_url = f"{self.base_url}/{end_point}" try: response = requests.post(api_url, json=data) if response and response.status_code == 200: if verbose: print(response.json()) return response.json() else: if verbose: print(f"Error: {response.status_code} - {response.text}") return {"Error": response.status_code, "Reason": response.text} except Exception as e: print(e) self.SERVICE_HEALTHY = False return None @timing_decorator def get(self, end_point, data=None, verbose=True): if not self.SERVICE_HEALTHY: return None if verbose: print(f"++ {end_point}") api_url = f"{self.base_url}/{end_point}" try: response = requests.get(api_url, json=data) if response.status_code == 200: if verbose: print(response.json()) return response.json() else: if verbose: print(f"Error: {response.status_code} - {response.text}") return {"Error": response.status_code, "Reason": response.text} except Exception as e: print(e) self.SERVICE_HEALTHY = False return None def player_setlooping(self, flag=True): self.post( "A2F/Player/SetLooping", {"a2f_player": args.a2f_player_id, "loop_audio": flag}, ) def player_play(self): self.post("A2F/Player/Play", {"a2f_player": args.a2f_player_id}) def player_pause(self): self.post("A2F/Player/Pause", {"a2f_player": args.a2f_player_id}) def player_setrootpath(self, dir_path): self.post( "A2F/Player/SetRootPath", {"a2f_player": args.a2f_player_id, "dir_path": dir_path}, ) def player_settrack(self, file_name): self.post( "A2F/Player/SetTrack", {"a2f_player": args.a2f_player_id, "file_name": file_name}, ) def player_gettracks(self): self.post("A2F/Player/GetTracks", {"a2f_player": args.a2f_player_id}) def player_gettime(self): response = self.post( "A2F/Player/GetTime", {"a2f_player": args.a2f_player_id}, False ) if response and response["status"] == "OK": return response["result"] else: return 0 def player_getrange(self): response = self.post( "A2F/Player/GetRange", {"a2f_player": args.a2f_player_id}, False ) if response and response["status"] == "OK": return response["result"]["work"] else: return (0, 0) def generatekeys(self): self.post("A2F/A2E/GenerateKeys", {"a2f_instance": args.a2f_instance_id}) def ActivateStreamLivelink(self, flag): self.post( "A2F/Exporter/ActivateStreamLivelink", {"node_path": args.a2f_livelink_id, "value": flag}, ) def IsStreamLivelinkConnected(self): response = self.post( "A2F/Exporter/IsStreamLivelinkConnected", {"node_path": args.a2f_livelink_id}, ) if response and response["status"] == "OK": return response["result"] else: return False def 
enable_audio_stream(self, flag): self.post( "A2F/Exporter/SetStreamLivelinkSettings", { "node_path": args.a2f_livelink_id, "values": {"enable_audio_stream": flag}, }, ) def set_livelink_ports( self, livelink_host, livelink_subject, livelink_port, livelink_audio_port, ): self.post( "A2F/Exporter/SetStreamLivelinkSettings", { "node_path": args.a2f_livelink_id, "values": { "livelink_host": livelink_host, "livelink_subject": livelink_subject, "livelink_port": livelink_port, "audio_port": livelink_audio_port, }, }, ) def get_preprocessing(self): response = self.post( "A2F/PRE/GetSettings", {"a2f_instance": args.a2f_instance_id}, ) if response and response["status"] == "OK": return response["result"] else: return {} def set_preprocessing(self, settings): settings["a2f_instance"] = args.a2f_instance_id self.post("A2F/PRE/SetSettings", settings) def get_postprocessing(self): response = self.post( "A2F/POST/GetSettings", {"a2f_instance": args.a2f_instance_id}, ) if response and response["status"] == "OK": return response["result"] else: return {} def set_postprocessing(self, settings): self.post( "A2F/POST/SetSettings", {"a2f_instance": args.a2f_instance_id, "settings": settings}, ) def setup(self): self.base_url = f"http://{args.a2f_host}:{args.a2f_port+self.index}" self.tts_voice = args.tts_voice if self.index > 0: # TODO: make it elegant self.tts_voice = VOICE_ACTORS[self.index % len(VOICE_ACTORS)] # always ping SERVICE_HEALTHY again in setup() self.SERVICE_HEALTHY = True self.ActivateStreamLivelink(True) if not self.SERVICE_HEALTHY: return self.player_setrootpath(CWD) self.player_setlooping(False) self.LIVELINK_SERVICE_HEALTHY = self.IsStreamLivelinkConnected() if not self.LIVELINK_SERVICE_HEALTHY: return self.enable_audio_stream(True) self.set_livelink_ports( args.livelink_host, f"{args.livelink_subject}-{self.index}", args.livelink_port + 10 * self.index, args.livelink_audio_port + 10 * self.index, ) pre_settings = self.get_preprocessing() pre_settings["prediction_delay"] = 0 pre_settings["blink_interval"] = 1.5 self.set_preprocessing(pre_settings) post_settings = self.get_postprocessing() post_settings["skin_strength"] = 1.3 self.set_postprocessing(post_settings) A2fInstance.instaces = [] openai_client = OpenAI() gc_client: gc.Client = None chat_ui: gr.ChatInterface = None def run_single_pipeline(a2f, answer, a2f_peer=None): global stop_current_a2f_play if not a2f_peer: a2f_peer = a2f # print(answer) mp3_file = text_to_mp3(answer, a2f.tts_voice) wav_file = mp3_to_wav(mp3_file) duration = a2f_peer.player_getrange()[1] position = a2f_peer.player_gettime() while position > 0 and position < duration: print(position, duration) if stop_current_a2f_play: print("stop_current_a2f_play") stop_current_a2f_play = False return time.sleep(1) position = a2f_peer.player_gettime() print("z") time.sleep(1) a2f.player_setrootpath(CWD) a2f.player_settrack(wav_file) # a2f_generatekeys() a2f.player_play() for file in A2fInstance.files_to_delete: try: os.remove(file) except Exception: pass A2fInstance.files_to_delete.clear() A2fInstance.files_to_delete.append(mp3_file) A2fInstance.files_to_delete.append(wav_file) current_speaker = -1 @timing_decorator def run_pipeline(answer): if args.a2f_instance_count == 1: run_single_pipeline(A2fInstance.instaces[0], answer) return global current_speaker if answer.startswith("("): current_speaker = -1 elif answer.startswith("A:"): current_speaker = 0 answer = answer[2:] elif answer.startswith("B:"): current_speaker = 1 answer = answer[2:] if current_speaker < 0 or 
current_speaker >= args.a2f_instance_count: return a2f = A2fInstance.instaces[current_speaker] if not a2f.SERVICE_HEALTHY: return run_single_pipeline(a2f, answer) @timing_decorator def text_to_mp3(text, voice): response = openai_client.audio.speech.create( model=args.tts_model, voice=voice, speed=args.tts_speed, input=text, ) timestamp = time.time() mp3_filename = f"{timestamp}.mp3" response.stream_to_file(mp3_filename) return mp3_filename @timing_decorator def mp3_to_wav(mp3_filename): sound = AudioSegment.from_mp3(mp3_filename) sound = sound.set_frame_rate(22050) wav_filename = f"{mp3_filename}.wav" sound.export(wav_filename, format="wav") return wav_filename @timing_decorator def get_completion(chat_history): response = completion( model=args.llm_model, messages=chat_history, api_base=args.llm_url, stream=args.llm_streaming, ) print(response) return response q = queue.Queue() cleanup_queue = False stop_current_a2f_play = False def pipeline_worker(): while True: print("--------------------------") global cleanup_queue global stop_current_a2f_play if cleanup_queue: while not q.empty(): item = q.get() q.task_done() if item == "cleanup_queue_token": break cleanup_queue = False stop_current_a2f_play = True item = q.get() if item == "cleanup_queue_token": continue print(f"Begin: {item}") run_pipeline(item) print(f"End: {item}") q.task_done() def talk_to_peer(message): if not gc_client: return result = gc_client.predict( message, api_name="/chat" # str in 'Message' Textbox component ) print(f"from peer: {result}") # chat_ui.textbox.submit(None, [result, result]) # chat_ui.textbox.submit() def predict(message, history): print("==========================") if message == "setup": str = "" for a2f in A2fInstance.instaces: a2f.setup() str += f"A2F running: {a2f.SERVICE_HEALTHY}\n" str += f"Live Link running: {a2f.LIVELINK_SERVICE_HEALTHY}\n" yield str return if message == "ping": for a2f in A2fInstance.instaces: a2f.post("") a2f.get("") yield "A2F ping" return if message == "redo": for a2f in A2fInstance.instaces: a2f.player_play() yield "A2F redo" return if message == "stop": global cleanup_queue cleanup_queue = True q.put("cleanup_queue_token") yield "stopped" return if message.startswith("peer"): items = message.split() if len(items) >= 2: gradio_port = int(items[1]) # TODO: support non localhost args.gradio_peer_url = f"http://{args.gradio_host}:{gradio_port}/" global gc_client gc_client = gc.Client(args.gradio_peer_url) yield f"I will chat with another llm-metahuman: {args.gradio_peer_url}" return history_openai_format = [] for human, assistant in history: history_openai_format.append({"role": "user", "content": human}) history_openai_format.append({"role": "assistant", "content": assistant}) history_openai_format.append({"role": "user", "content": message}) # start_time = time.time() response = get_completion(history_openai_format) yield ".." 
# global cleanup_queue # cleanup_queue = True # q.put("cleanup_queue_token") if args.llm_streaming: # create variables to collect the stream of chunks UNUSED_collected_chunks = [] collected_messages = [] complete_sentences = "" # iterate through the stream of events for chunk in response: # chunk_time = ( # time.time() - start_time # ) # calculate the time delay of the chunk UNUSED_collected_chunks.append(chunk) # save the event response chunk_message = chunk.choices[0].delta.content # extract the message if not chunk_message: continue collected_messages.append(chunk_message) # save the message # print( # f"Message {chunk_time:.2f} s after request: {chunk_message}" # ) # print the delay and text print(chunk_message) if chunk_message in [ ".", "!", "?", "。", "!", "?", ] or chunk_message.endswith("\n"): # if not chunk_message or "\n" in chunk_message: one_sentence = "".join([m for m in collected_messages if m is not None]) if len(one_sentence) < 10: # ignore short sentences continue collected_messages = [] complete_sentences += one_sentence q.put(one_sentence) # run_pipeline(one_sentence) yield complete_sentences talk_to_peer(one_sentence) # print the time delay and text received # print(f"Full response received {chunk_time:.2f} seconds after request") # # clean None in collected_messages # collected_messages = [m for m in collected_messages if m is not None] # full_reply_content = "".join([m for m in collected_messages]) # print(f"Full conversation received: {full_reply_content}") # yield full_reply_content else: if len(response.choices[0].message.content) == 0: return answer = response.choices[0].message.content yield answer run_pipeline(answer) def main(): import argparse parser = argparse.ArgumentParser(description="llm.py arguments") # gradio settings parser.add_argument("--a2f_instance_count", type=int, default=1) parser.add_argument("--gradio_host", default="localhost") parser.add_argument("--gradio_port", type=int, default=7860) parser.add_argument( "--gradio_peer_url", default=None, help="the gradio peer that this gradio instance will chat with. 
Default value is None, which means chat with a human.", ) # llm / litellm settings parser.add_argument("--llm_engine", default="gpt", choices=["gpt", "llama2"]) parser.add_argument( "--llm_model", default=None, help="https://docs.litellm.ai/docs/providers" ) parser.add_argument("--llm_url", default=None) parser.add_argument( "--llm_streaming", default=True, action=argparse.BooleanOptionalAction ) # audio2face settings parser.add_argument("--a2f_host", default="localhost") parser.add_argument("--a2f_port", default=8011, type=int) parser.add_argument("--a2f_instance_id", default="/World/audio2face/CoreFullface") parser.add_argument("--a2f_player_id", default="/World/audio2face/Player") parser.add_argument("--a2f_livelink_id", default="/World/audio2face/StreamLivelink") # tts settings parser.add_argument("--tts_model", default="tts-1", choices=["tts-1", "tts-1-hd"]) parser.add_argument("--tts_speed", default=1.1, type=float) # livelink settings parser.add_argument("--livelink_host", default="localhost") parser.add_argument("--livelink_port", default=12030, type=int) parser.add_argument("--livelink_subject", default="Audio2Face") parser.add_argument("--livelink_audio_port", default=12031, type=int) parser.add_argument( "--tts_voice", default="nova", choices=VOICE_ACTORS, help="https://platform.openai.com/docs/guides/text-to-speech", ) global args args = parser.parse_args() if not args.llm_model: if args.llm_engine == "gpt": args.llm_model = args.llm_model or "gpt-3.5-turbo" elif args.llm_engine == "llama2": args.llm_model = args.llm_model or "ollama/llama2" args.llm_url = args.llm_url or "http://localhost:11434" threading.Thread(target=pipeline_worker, daemon=True).start() for i in range(args.a2f_instance_count): a2f = A2fInstance(i) a2f.setup() A2fInstance.instaces.append(a2f) global chat_ui chat_ui = gr.ChatInterface( predict, title=f"llm-metahuman @{args.gradio_port}", examples=["hello", "tell me 3 jokes", "what's the meaning of life?"], ) chat_ui.queue().launch(server_name=args.gradio_host, server_port=args.gradio_port) q.join() if __name__ == "__main__": main()
vinjn/llm-metahuman/audio-client/requirements.txt
openai pydub gradio gradio_client requests litellm
vinjn/llm-metahuman/audio-client/avatar.bat
@REM c:\p4\audio2face\run_avatar.bat ^
%localappdata%\ov\pkg\audio2face-2023.1.0-beta.4\avatar.kit.bat ^
--enable omni.services.transport.server.http ^
--enable omni.kit.tool.asset_exporter ^
--enable omni.avatar.livelink ^
--enable omni.avatar.ui.livelink ^
--/app/renderer/sleepMsOutOfFocus=0 ^
--/app/asyncRendering=false ^
--/rtx/reflections/enabled=false ^
--/rtx/translucency/enabled=false ^
--/rtx/post/lensFlares/enabled=false ^
--/rtx/post/dof/enabled=false ^
--/rtx/indirectDiffuse/enabled=false ^
%*
vinjn/llm-metahuman/audio-client/ref/pytts-demo.py
import pyttsx3 engine = pyttsx3.init() # object creation """ RATE""" rate = engine.getProperty("rate") # getting details of current speaking rate print(rate) # printing current voice rate engine.setProperty("rate", 125) # setting up new voice rate """VOLUME""" volume = engine.getProperty( "volume" ) # getting to know current volume level (min=0 and max=1) print(volume) # printing current volume level engine.setProperty("volume", 1.0) # setting up volume level between 0 and 1 """VOICE""" voices = engine.getProperty("voices") # getting details of current voice print(voices) engine.setProperty("voice", voices[0].id) # changing index, changes voices. o for male # engine.setProperty('voice', voices[1].id) #changing index, changes voices. 1 for female engine.say("Hello World!") engine.say("说什么 current speaking rate is " + str(rate)) engine.runAndWait() engine.stop() """Saving Voice to a file""" # On linux make sure that 'espeak' and 'ffmpeg' are installed engine.save_to_file("Hello World", "test.mp3") engine.runAndWait()
vinjn/llm-metahuman/audio-client/ref/minimal-chatbot.py
import random import gradio as gr def alternatingly_agree(message, history): if len(history) % 2 == 0: return f"Yes, I do think that '{message}'" else: return "I don't think so" count = 0 def textbox_update(chatui_textbox): global count count += 1 if count % 10 == 0: return "z" else: return chatui_textbox if __name__ == "__main__": with gr.ChatInterface(alternatingly_agree) as chat_ui: chat_ui.textbox.change( textbox_update, chat_ui.textbox, chat_ui.textbox, every=1, trigger_mode="once", ) chat_ui.launch()
vinjn/llm-metahuman/audio-client/ref/portal.py
import gradio as gr def task1(input_text): return "Task 1 Result: " + input_text def task2(input_image): return "Task 2 Result" def task3(input_image): return "Task 2 Result" # interface one iface1 = gr.Interface( fn=task1, inputs="text", outputs="text", title="Multi-Page Interface" ) # interface two iface2 = gr.Interface( fn=task2, inputs="image", outputs="text", title="Multi-Page Interface" ) tts_examples = [ "I love learning machine learning", "How do you do?", ] tts_demo = gr.load( "huggingface/facebook/fastspeech2-en-ljspeech", title=None, examples=tts_examples, description="Give me something to say!", cache_examples=False, ) stt_demo = gr.load( "huggingface/facebook/wav2vec2-base-960h", title=None, inputs="mic", description="Let me try to guess what you're saying!", ) demo = gr.TabbedInterface( [iface1, iface2, tts_demo, stt_demo], ["Text-to-text", "image-to-text", "Text-to-speech", "Speech-to-text"], ) # Run the interface demo.launch(share=True)
vinjn/llm-metahuman/audio-client/ref/sine-curve.py
import math import gradio as gr import plotly.express as px import numpy as np plot_end = 2 * math.pi def get_plot(period=1): global plot_end x = np.arange(plot_end - 2 * math.pi, plot_end, 0.02) y = np.sin(2*math.pi*period * x) fig = px.line(x=x, y=y) plot_end += 2 * math.pi if plot_end > 1000: plot_end = 2 * math.pi return fig with gr.Blocks() as demo: with gr.Row(): with gr.Column(): gr.Markdown("Change the value of the slider to automatically update the plot") period = gr.Slider(label="Period of plot", value=1, minimum=0, maximum=10, step=1) plot = gr.Plot(label="Plot (updates every half second)") dep = demo.load(get_plot, None, plot, every=1) period.change(get_plot, period, plot, every=1, cancels=[dep]) if __name__ == "__main__": demo.queue().launch()
vinjn/llm-metahuman/audio-client/prompt/jira-vs-slack.json
{ "task": "Write a stand-up comedy script with 10 dialogs", "characters": [ { "title": "Software Engineer", "name": "Alloy", "preference": "Email", "description": "Enthusiastic software engineer" }, { "title": "Program Manager", "name": "Nova", "preference": "Slack", "description": "Organized program manager" } ], "topic": "Argument about email vs Slack", "format": "A: says something in one line. B: says something in one line. Remove the number and quotes." }
mnaskret/omni-tetGen/README.md
# omni-tetGen
An Omniverse extension to generate soft body meshes

![extTestBunny](https://user-images.githubusercontent.com/4333336/185104847-a556bf22-2323-4d70-8bb8-b8a57e1ec67d.gif)

## Description:

omni-tetGen uses the famous tetgen mesh generator developed by Hang Si to create tetrahedral and edge meshes for soft body simulation. The extension provides a user-friendly drag-and-drop mechanism for input mesh data in standard .obj format. It then runs the Python tetgen wrapper to create meshes, which are converted to numpy arrays and described with additional information like edge rest lengths or tetrahedra volumes. The generated mesh is added to the stage with additional attributes:

- edge
- edgesRestLengths
- elem
- tetrahedronsRestVolumes
- inverseMasses

![Screenshot from 2022-08-17 13-22-38](https://user-images.githubusercontent.com/4333336/185106588-6f87d9be-c9f1-4ee4-add1-e3bff3a1538d.png)

## PBD .ogn node

Additionally, an Omniverse node with a simple Position Based Dynamics algorithm implementation with CUDA kernels is attached in order to test the generated meshes.

![Screenshot from 2022-08-17 13-25-31](https://user-images.githubusercontent.com/4333336/185107000-5837f3be-8540-4c5c-884f-1eb7c01b42b8.png)

## Usage

- [Install Omniverse](https://www.nvidia.com/en-us/omniverse/) with e.g. the Create environment
- Go to: Window -> Extensions -> Gear icon -> Add extension search path: `git://github.com/mnaskret/omni-tetGen.git?branch=main`
- Find Tetrahedralizer in the list of extensions and turn it on (preferably with autoload)
- In the Tetrahedralizer window you can drop any .obj file from the Omniverse Content browser, choose the preferred options and generate a cool mesh
- Add a graph with the PBDBasicGravity node, or create your own node that utilizes the extra mesh attributes to have fun with your mesh (a minimal sketch of reading these attributes back is shown below)
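## Reading the generated attributes

Below is a minimal sketch of reading the generated attributes back from the stage, for example inside your own node or script. The attribute names come from the extension; the prim path (`/World/bunnyMesh`) is only an example, since the extension creates the prim at `/World/<meshName>Mesh` for whatever .obj you dropped.

```python
import omni.usd

stage = omni.usd.get_context().get_stage()

# Example path: the extension names the generated prim "<meshName>Mesh" under /World.
prim = stage.GetPrimAtPath("/World/bunnyMesh")

points = prim.GetAttribute("points").Get()
edges = prim.GetAttribute("edge").Get()                                # int2 pairs of point indices
edges_rest_lengths = prim.GetAttribute("edgesRestLengths").Get()       # one float per edge
tets = prim.GetAttribute("elem").Get()                                 # int4 tetrahedron indices
tet_rest_volumes = prim.GetAttribute("tetrahedronsRestVolumes").Get()  # one float per tetrahedron
inverse_masses = prim.GetAttribute("inverseMasses").Get()              # one float per point

print(len(points), "points,", len(edges), "edges,", len(tets), "tets")
```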
mnaskret/omni-tetGen/config/extension.toml
[package]
# Semantic Versioning is used: https://semver.org/
version = "1.0.0"

# The title and description fields are primarily for displaying extension info in UI
title = "Tetrahedralizer"
description="Generates a tetrahedral mesh from an external triangle mesh."

# Path (relative to the root) or content of readme markdown file for UI.
readme = "docs/README.md"

# URL of the extension source repository.
repository = ""

# One of categories for UI.
category = "Example"

# Keywords for the extension
keywords = ["kit", "example"]

# Use omni.ui to build simple UI
[dependencies]
"omni.kit.uiapp" = {}

# Main python module this extension provides, it will be publicly available as "import mnresearch.tetgen".
[[python.module]]
name = "mnresearch.tetgen"

[python.pipapi]
requirements = ['numpy', 'pxr', 'pyvista', 'tetgenExt==0.6.dev0', 'warp']
use_online_index = true
mnaskret/omni-tetGen/mnresearch/tetgen/extension.py
import omni.ext import omni.ui as ui import omni.kit.commands as commands import pxr from pxr import Sdf import numpy as np import tetgenExt import os import math import warp as wp class MyExtension(omni.ext.IExt): fileUrl = '' def drop_accept(url, ext): # Accept drops of specific extension only print("File dropped") return url.endswith(ext) def drop(widget, event): widget.text = event.mime_data MyExtension.fileUrl = event.mime_data def drop_area(self, ext): # If drop is acceptable, the rectangle is blue style = {} style["Rectangle"] = {"background_color": 0xFF999999} style["Rectangle:drop"] = {"background_color": 0xFF994400} stack = ui.ZStack() with stack: ui.Rectangle(style=style) text = ui.Label(f"Accepts {ext}", alignment=ui.Alignment.CENTER, word_wrap=True) self.fileUrl = stack.set_accept_drop_fn(lambda d, e=ext: MyExtension.drop_accept(d, e)) stack.set_drop_fn(lambda a, w=text: MyExtension.drop(w, a)) def createMesh(usd_context, stage, meshName): commands.execute('CreateReferenceCommand', usd_context=usd_context, path_to='/World/' + meshName, asset_path=MyExtension.fileUrl, instanceable=True) prim = stage.GetPrimAtPath('/World/' + meshName + '/' + meshName + '/' + meshName) return prim def addAttributes(stage, prim, node, elem, face, edge, normals, colors, meshName): numberOfTris = int(face.shape[0] / 3) faceCount = np.full((numberOfTris), 3) mesh = pxr.PhysicsSchemaTools.createMesh(stage, pxr.Sdf.Path('/World/' + meshName + 'Mesh'), node.tolist(), normals.tolist(), face.tolist(), faceCount.tolist()) newPrim = stage.GetPrimAtPath('/World/' + meshName + 'Mesh') velocitiesNP = np.zeros_like(node) inverseMasses = np.ones(len(node), dtype=float) edgesRestLengths = np.zeros(len(edge), dtype=float) tetrahedronsRestVolumes = np.zeros(len(elem), dtype=float) for i in range(len(edge)): edgesRestLengths[i] = np.linalg.norm(node[edge[i][0]] - node[edge[i][1]]) for i in range(len(elem)): tetrahedronPositionA = node[elem[i][0]] tetrahedronPositionB = node[elem[i][1]] tetrahedronPositionC = node[elem[i][2]] tetrahedronPositionD = node[elem[i][3]] p1 = tetrahedronPositionB - tetrahedronPositionA p2 = tetrahedronPositionC - tetrahedronPositionA p3 = tetrahedronPositionD - tetrahedronPositionA volume = wp.dot(wp.cross(p1, p2), p3) / 6.0 tetrahedronsRestVolumes[i] = volume velocitiesValue = pxr.Vt.Vec3fArray().FromNumpy(velocitiesNP) elemValue = pxr.Vt.Vec4iArray().FromNumpy(elem) edgeValue = pxr.Vt.Vec2iArray().FromNumpy(edge) edgesRestLengthsValue = pxr.Vt.FloatArray().FromNumpy(edgesRestLengths) inverseMassesValue = pxr.Vt.FloatArray().FromNumpy(inverseMasses) tetrahedronsRestVolumesValue = pxr.Vt.FloatArray().FromNumpy(tetrahedronsRestVolumes) elemAtt = newPrim.CreateAttribute('elem', Sdf.ValueTypeNames.Int4Array) edgeAtt = newPrim.CreateAttribute('edge', Sdf.ValueTypeNames.Int2Array) edgesRestLengthsAtt = newPrim.CreateAttribute('edgesRestLengths', Sdf.ValueTypeNames.FloatArray) inverseMassesAtt = newPrim.CreateAttribute('inverseMasses', Sdf.ValueTypeNames.FloatArray) tetrahedronsRestVolumesAtt = newPrim.CreateAttribute('tetrahedronsRestVolumes', Sdf.ValueTypeNames.FloatArray) velocitiesAtt = newPrim.GetAttribute('velocities') velocitiesAtt.Set(velocitiesValue) elemAtt.Set(elemValue) edgeAtt.Set(edgeValue) edgesRestLengthsAtt.Set(edgesRestLengthsValue) inverseMassesAtt.Set(inverseMassesValue) tetrahedronsRestVolumesAtt.Set(tetrahedronsRestVolumesValue) return mesh, newPrim def extractMeshDataToNP(prim): points = prim.GetAttribute('points').Get() faces = 
prim.GetAttribute('faceVertexIndices').Get() pointsNP = np.array(points, dtype=float) facesNP = np.array(faces, dtype=int) facesNP = facesNP.reshape((-1, 3)) return pointsNP, facesNP def setPLC(self, value): self.PLC = value def setQuality(self, value): self.Quality = value def cross(a, b): c = [a[1]*b[2] - a[2]*b[1], a[2]*b[0] - a[0]*b[2], a[0]*b[1] - a[1]*b[0]] return c def calculateNormals(node, face): numberOfTris = int(face.shape[0] / 3) normals = np.empty_like(node) for i in range(numberOfTris): pIdA = face[i][0] pIdB = face[i][1] pIdC = face[i][2] pA = node[pIdA] pB = node[pIdB] pC = node[pIdC] vA = pB - pA vB = pC - pA normal = MyExtension.cross(vA, vB) normalized = np.linalg.norm(normal) normals[pIdA] += normalized normals[pIdB] += normalized normals[pIdC] += normalized return normals def on_startup(self, ext_id): print("[mnresearch.tetgen] MyExtension startup") self._window = ui.Window("Tetrahedralizer", width=300, height=300) with self._window.frame: self.PLC = False self.Quality = False with ui.VStack(): MyExtension.drop_area(self, ".obj") with ui.HStack(): ui.Label("PLC", height=0) plcCB = ui.CheckBox(width=20) plcCB.model.add_value_changed_fn( lambda a: MyExtension.setPLC(self, a.get_value_as_bool())) with ui.HStack(): ui.Label("Quality", height=0) qualityCB = ui.CheckBox(width=20) qualityCB.model.add_value_changed_fn( lambda a: MyExtension.setQuality(self, a.get_value_as_bool())) def on_click(): print("clicked!") self.usd_context = omni.usd.get_context() self.stage = self.usd_context.get_stage() if MyExtension.fileUrl != "": meshName = MyExtension.fileUrl.split(os.sep)[-1][:-4] prim = MyExtension.createMesh(self.usd_context, self.stage, meshName) points, faces = MyExtension.extractMeshDataToNP(prim) tet = tetgenExt.TetGen(points, faces) print('Running tetGen on: ', MyExtension.fileUrl, '\nwith options:', 'PLC: ', self.PLC, '\nQuality: ', self.Quality) node, elem, face, edge = tet.tetrahedralize(quality=True, plc=True, facesout=1, edgesout=1) normals = MyExtension.calculateNormals(node, face) colors = np.ones_like(normals) face = face.ravel() mesh, newPrim = MyExtension.addAttributes(self.stage, prim, node, elem, face, edge, normals, colors, meshName) pxr.Usd.Stage.RemovePrim(self.stage, '/World/' + meshName) ui.Button("Generate tetrahedral mesh", clicked_fn=lambda: on_click()) def on_shutdown(self): print("[mnresearch.tetgen] MyExtension shutdown")
mnaskret/omni-tetGen/mnresearch/tetgen/__init__.py
from .extension import *
mnaskret/omni-tetGen/mnresearch/tetgen/PBDBasicGravityDatabase.py
"""Support for simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity PBDBasicGravity """ import omni.graph.core as og import traceback import sys import numpy class PBDBasicGravityDatabase(og.Database): """Helper class providing simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.edge inputs.edgesRestLengths inputs.elem inputs.gravity inputs.ground inputs.inverseMasses inputs.ks_distance inputs.ks_volume inputs.num_substeps inputs.points inputs.sim_constraints inputs.tetrahedronsRestVolumes inputs.velocities inputs.velocity_dampening Outputs: outputs.points outputs.velocities """ # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, Is_Required, DefaultValue # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:edge', 'int2[]', 0, None, 'Input edges', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:edgesRestLengths', 'float[]', 0, None, 'Input edges rest lengths', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:elem', 'int4[]', 0, None, 'Input tetrahedrons', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:gravity', 'vector3f', 0, None, 'Gravity constant', {og.MetadataKeys.DEFAULT: '[0.0, -9.8, 0.0]'}, True, [0.0, -9.8, 0.0]), ('inputs:ground', 'float', 0, None, 'Ground level', {og.MetadataKeys.DEFAULT: '-100.0'}, True, -100.0), ('inputs:inverseMasses', 'float[]', 0, None, 'Inverse masses', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:ks_distance', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:ks_volume', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:num_substeps', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '8'}, True, 8), ('inputs:points', 'point3f[]', 0, None, 'Input points', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:sim_constraints', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '1'}, True, 1), ('inputs:tetrahedronsRestVolumes', 'float[]', 0, None, 'Input tetrahedrons rest volumes', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocities', 'vector3f[]', 0, None, 'Input velocities', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocity_dampening', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '0.1'}, True, 0.1), ('outputs:points', 'point3f[]', 0, None, 'Output points', {}, True, None), ('outputs:velocities', 'vector3f[]', 0, None, 'Output velocities', {}, True, None), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.gravity = og.Database.ROLE_VECTOR role_data.inputs.points = og.Database.ROLE_POINT role_data.inputs.velocities = og.Database.ROLE_VECTOR role_data.outputs.points = og.Database.ROLE_POINT role_data.outputs.velocities = og.Database.ROLE_VECTOR return role_data class ValuesForInputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute 
data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) @property def edge(self): data_view = og.AttributeValueHelper(self._attributes.edge) return data_view.get() @edge.setter def edge(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edge) data_view = og.AttributeValueHelper(self._attributes.edge) data_view.set(value) self.edge_size = data_view.get_array_size() @property def edgesRestLengths(self): data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) return data_view.get() @edgesRestLengths.setter def edgesRestLengths(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edgesRestLengths) data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) data_view.set(value) self.edgesRestLengths_size = data_view.get_array_size() @property def elem(self): data_view = og.AttributeValueHelper(self._attributes.elem) return data_view.get() @elem.setter def elem(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.elem) data_view = og.AttributeValueHelper(self._attributes.elem) data_view.set(value) self.elem_size = data_view.get_array_size() @property def gravity(self): data_view = og.AttributeValueHelper(self._attributes.gravity) return data_view.get() @gravity.setter def gravity(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gravity) data_view = og.AttributeValueHelper(self._attributes.gravity) data_view.set(value) @property def ground(self): data_view = og.AttributeValueHelper(self._attributes.ground) return data_view.get() @ground.setter def ground(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ground) data_view = og.AttributeValueHelper(self._attributes.ground) data_view.set(value) @property def inverseMasses(self): data_view = og.AttributeValueHelper(self._attributes.inverseMasses) return data_view.get() @inverseMasses.setter def inverseMasses(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.inverseMasses) data_view = og.AttributeValueHelper(self._attributes.inverseMasses) data_view.set(value) self.inverseMasses_size = data_view.get_array_size() @property def ks_distance(self): data_view = og.AttributeValueHelper(self._attributes.ks_distance) return data_view.get() @ks_distance.setter def ks_distance(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_distance) data_view = og.AttributeValueHelper(self._attributes.ks_distance) data_view.set(value) @property def ks_volume(self): data_view = og.AttributeValueHelper(self._attributes.ks_volume) return data_view.get() @ks_volume.setter def ks_volume(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_volume) data_view = og.AttributeValueHelper(self._attributes.ks_volume) data_view.set(value) @property def num_substeps(self): data_view = og.AttributeValueHelper(self._attributes.num_substeps) return data_view.get() @num_substeps.setter def num_substeps(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.num_substeps) data_view = og.AttributeValueHelper(self._attributes.num_substeps) data_view.set(value) @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get() @points.setter def points(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.points) data_view = og.AttributeValueHelper(self._attributes.points) 
data_view.set(value) self.points_size = data_view.get_array_size() @property def sim_constraints(self): data_view = og.AttributeValueHelper(self._attributes.sim_constraints) return data_view.get() @sim_constraints.setter def sim_constraints(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sim_constraints) data_view = og.AttributeValueHelper(self._attributes.sim_constraints) data_view.set(value) @property def tetrahedronsRestVolumes(self): data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) return data_view.get() @tetrahedronsRestVolumes.setter def tetrahedronsRestVolumes(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.tetrahedronsRestVolumes) data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) data_view.set(value) self.tetrahedronsRestVolumes_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get() @velocities.setter def velocities(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocities) data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() @property def velocity_dampening(self): data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) return data_view.get() @velocity_dampening.setter def velocity_dampening(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocity_dampening) data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) data_view.set(value) class ValuesForOutputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.points_size = None self.velocities_size = None @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get(reserved_element_count = self.points_size) @points.setter def points(self, value): data_view = og.AttributeValueHelper(self._attributes.points) data_view.set(value) self.points_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get(reserved_element_count = self.velocities_size) @velocities.setter def velocities(self, value): data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = PBDBasicGravityDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) 
self.outputs = PBDBasicGravityDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = PBDBasicGravityDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes) class abi: """Class defining the ABI interface for the node type""" @staticmethod def get_node_type(): get_node_type_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'get_node_type', None) if callable(get_node_type_function): return get_node_type_function() return 'mnresearch.tetgen.PBDBasicGravity' @staticmethod def compute(context, node): db = PBDBasicGravityDatabase(node) try: db.inputs._setting_locked = True compute_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'compute', None) if callable(compute_function) and compute_function.__code__.co_argcount > 1: return compute_function(context, node) return PBDBasicGravityDatabase.NODE_TYPE_CLASS.compute(db) except Exception as error: stack_trace = "".join(traceback.format_tb(sys.exc_info()[2].tb_next)) db.log_error(f'Assertion raised in compute - {error}\n{stack_trace}', add_context=False) finally: db.inputs._setting_locked = False return False @staticmethod def initialize(context, node): PBDBasicGravityDatabase._initialize_per_node_data(node) # Set any default values the attributes have specified if not node._do_not_use(): db = PBDBasicGravityDatabase(node) db.inputs.edge = [] db.inputs.edgesRestLengths = [] db.inputs.elem = [] db.inputs.gravity = [0.0, -9.8, 0.0] db.inputs.ground = -100.0 db.inputs.inverseMasses = [] db.inputs.ks_distance = 1.0 db.inputs.ks_volume = 1.0 db.inputs.num_substeps = 8 db.inputs.points = [] db.inputs.sim_constraints = 1 db.inputs.tetrahedronsRestVolumes = [] db.inputs.velocities = [] db.inputs.velocity_dampening = 0.1 initialize_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'initialize', None) if callable(initialize_function): initialize_function(context, node) @staticmethod def release(node): release_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'release', None) if callable(release_function): release_function(node) PBDBasicGravityDatabase._release_per_node_data(node) @staticmethod def update_node_version(context, node, old_version, new_version): update_node_version_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'update_node_version', None) if callable(update_node_version_function): return update_node_version_function(context, node, old_version, new_version) return False @staticmethod def initialize_type(node_type): initialize_type_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'initialize_type', None) needs_initializing = True if callable(initialize_type_function): needs_initializing = initialize_type_function(node_type) if needs_initializing: node_type.set_metadata(og.MetadataKeys.EXTENSION, "mnresearch.tetgen") node_type.set_metadata(og.MetadataKeys.UI_NAME, "PBDBasicGravity") node_type.set_metadata(og.MetadataKeys.DESCRIPTION, "PBDBasicGravity") node_type.set_metadata(og.MetadataKeys.LANGUAGE, "Python") PBDBasicGravityDatabase.INTERFACE.add_to_node_type(node_type) @staticmethod def on_connection_type_resolve(node): on_connection_type_resolve_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'on_connection_type_resolve', None) if callable(on_connection_type_resolve_function): on_connection_type_resolve_function(node) NODE_TYPE_CLASS = None GENERATOR_VERSION = (1, 4, 0) TARGET_VERSION = (2, 29, 1) @staticmethod def 
register(node_type_class): PBDBasicGravityDatabase.NODE_TYPE_CLASS = node_type_class og.register_node_type(PBDBasicGravityDatabase.abi, 1) @staticmethod def deregister(): og.deregister_node_type("mnresearch.tetgen.PBDBasicGravity")
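For reference, register and deregister above are the hooks that bind a Python node implementation to this generated ABI. The wiring below is a hypothetical sketch only (the class name PBDGravityNodeExtension is illustrative and such startup code is not part of this file); in practice the generated ogn package may also register the type automatically.

import omni.ext
from mnresearch.tetgen.PBDBasicGravityDatabase import PBDBasicGravityDatabase
from mnresearch.tetgen.ogn.nodes.PBDBasicGravity import PBDBasicGravity

class PBDGravityNodeExtension(omni.ext.IExt):
    def on_startup(self, ext_id):
        # Bind the node implementation class to the generated ABI and register the node type
        PBDBasicGravityDatabase.register(PBDBasicGravity)

    def on_shutdown(self):
        PBDBasicGravityDatabase.deregister()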
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/OgnNewNodeDatabase.py
"""Support for simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity PBDGravity """ import omni.graph.core as og import sys import traceback import numpy class OgnNewNodeDatabase(og.Database): """Helper class providing simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.edge inputs.edgesRestLengths inputs.elem inputs.gravity inputs.ground inputs.inverseMasses inputs.ks_distance inputs.ks_volume inputs.num_substeps inputs.points inputs.sim_constraints inputs.tetrahedronsRestVolumes inputs.velocities inputs.velocity_dampening Outputs: outputs.points outputs.velocities """ # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, Is_Required, DefaultValue # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:edge', 'int2[]', 0, None, 'Input edges', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:edgesRestLengths', 'float[]', 0, None, 'Input edges rest lengths', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:elem', 'int4[]', 0, None, 'Input tetrahedrons', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:gravity', 'vector3f', 0, None, 'Gravity constant', {og.MetadataKeys.DEFAULT: '[0.0, -9.8, 0.0]'}, True, [0.0, -9.8, 0.0]), ('inputs:ground', 'float', 0, None, 'Ground level', {og.MetadataKeys.DEFAULT: '-100.0'}, True, -100.0), ('inputs:inverseMasses', 'float[]', 0, None, 'Inverse masses', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:ks_distance', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:ks_volume', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:num_substeps', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '8'}, True, 8), ('inputs:points', 'point3f[]', 0, None, 'Input points', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:sim_constraints', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '1'}, True, 1), ('inputs:tetrahedronsRestVolumes', 'float[]', 0, None, 'Input tetrahedrons rest volumes', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocities', 'vector3f[]', 0, None, 'Input velocities', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocity_dampening', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '0.1'}, True, 0.1), ('outputs:points', 'point3f[]', 0, None, 'Output points', {}, True, None), ('outputs:velocities', 'vector3f[]', 0, None, 'Output velocities', {}, True, None), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.gravity = og.Database.ROLE_VECTOR role_data.inputs.points = og.Database.ROLE_POINT role_data.inputs.velocities = og.Database.ROLE_VECTOR role_data.outputs.points = og.Database.ROLE_POINT role_data.outputs.velocities = og.Database.ROLE_VECTOR return role_data class ValuesForInputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context 
= node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) @property def edge(self): data_view = og.AttributeValueHelper(self._attributes.edge) return data_view.get() @edge.setter def edge(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edge) data_view = og.AttributeValueHelper(self._attributes.edge) data_view.set(value) self.edge_size = data_view.get_array_size() @property def edgesRestLengths(self): data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) return data_view.get() @edgesRestLengths.setter def edgesRestLengths(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edgesRestLengths) data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) data_view.set(value) self.edgesRestLengths_size = data_view.get_array_size() @property def elem(self): data_view = og.AttributeValueHelper(self._attributes.elem) return data_view.get() @elem.setter def elem(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.elem) data_view = og.AttributeValueHelper(self._attributes.elem) data_view.set(value) self.elem_size = data_view.get_array_size() @property def gravity(self): data_view = og.AttributeValueHelper(self._attributes.gravity) return data_view.get() @gravity.setter def gravity(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gravity) data_view = og.AttributeValueHelper(self._attributes.gravity) data_view.set(value) @property def ground(self): data_view = og.AttributeValueHelper(self._attributes.ground) return data_view.get() @ground.setter def ground(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ground) data_view = og.AttributeValueHelper(self._attributes.ground) data_view.set(value) @property def inverseMasses(self): data_view = og.AttributeValueHelper(self._attributes.inverseMasses) return data_view.get() @inverseMasses.setter def inverseMasses(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.inverseMasses) data_view = og.AttributeValueHelper(self._attributes.inverseMasses) data_view.set(value) self.inverseMasses_size = data_view.get_array_size() @property def ks_distance(self): data_view = og.AttributeValueHelper(self._attributes.ks_distance) return data_view.get() @ks_distance.setter def ks_distance(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_distance) data_view = og.AttributeValueHelper(self._attributes.ks_distance) data_view.set(value) @property def ks_volume(self): data_view = og.AttributeValueHelper(self._attributes.ks_volume) return data_view.get() @ks_volume.setter def ks_volume(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_volume) data_view = og.AttributeValueHelper(self._attributes.ks_volume) data_view.set(value) @property def num_substeps(self): data_view = og.AttributeValueHelper(self._attributes.num_substeps) return data_view.get() @num_substeps.setter def num_substeps(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.num_substeps) data_view = og.AttributeValueHelper(self._attributes.num_substeps) data_view.set(value) @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get() @points.setter def points(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.points) data_view = og.AttributeValueHelper(self._attributes.points) data_view.set(value) 
self.points_size = data_view.get_array_size() @property def sim_constraints(self): data_view = og.AttributeValueHelper(self._attributes.sim_constraints) return data_view.get() @sim_constraints.setter def sim_constraints(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sim_constraints) data_view = og.AttributeValueHelper(self._attributes.sim_constraints) data_view.set(value) @property def tetrahedronsRestVolumes(self): data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) return data_view.get() @tetrahedronsRestVolumes.setter def tetrahedronsRestVolumes(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.tetrahedronsRestVolumes) data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) data_view.set(value) self.tetrahedronsRestVolumes_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get() @velocities.setter def velocities(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocities) data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() @property def velocity_dampening(self): data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) return data_view.get() @velocity_dampening.setter def velocity_dampening(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocity_dampening) data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) data_view.set(value) class ValuesForOutputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.points_size = None self.velocities_size = None @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get(reserved_element_count = self.points_size) @points.setter def points(self, value): data_view = og.AttributeValueHelper(self._attributes.points) data_view.set(value) self.points_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get(reserved_element_count = self.velocities_size) @velocities.setter def velocities(self, value): data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = OgnNewNodeDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) self.outputs = 
OgnNewNodeDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = OgnNewNodeDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes) class abi: """Class defining the ABI interface for the node type""" @staticmethod def get_node_type(): get_node_type_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'get_node_type', None) if callable(get_node_type_function): return get_node_type_function() return 'mnresearch.tetgen.PBDBasicGravity' @staticmethod def compute(context, node): db = OgnNewNodeDatabase(node) try: db.inputs._setting_locked = True compute_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'compute', None) if callable(compute_function) and compute_function.__code__.co_argcount > 1: return compute_function(context, node) return OgnNewNodeDatabase.NODE_TYPE_CLASS.compute(db) except Exception as error: stack_trace = "".join(traceback.format_tb(sys.exc_info()[2].tb_next)) db.log_error(f'Assertion raised in compute - {error}\n{stack_trace}', add_context=False) finally: db.inputs._setting_locked = False return False @staticmethod def initialize(context, node): OgnNewNodeDatabase._initialize_per_node_data(node) # Set any default values the attributes have specified if not node._do_not_use(): db = OgnNewNodeDatabase(node) db.inputs.edge = [] db.inputs.edgesRestLengths = [] db.inputs.elem = [] db.inputs.gravity = [0.0, -9.8, 0.0] db.inputs.ground = -100.0 db.inputs.inverseMasses = [] db.inputs.ks_distance = 1.0 db.inputs.ks_volume = 1.0 db.inputs.num_substeps = 8 db.inputs.points = [] db.inputs.sim_constraints = 1 db.inputs.tetrahedronsRestVolumes = [] db.inputs.velocities = [] db.inputs.velocity_dampening = 0.1 initialize_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'initialize', None) if callable(initialize_function): initialize_function(context, node) @staticmethod def release(node): release_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'release', None) if callable(release_function): release_function(node) OgnNewNodeDatabase._release_per_node_data(node) @staticmethod def update_node_version(context, node, old_version, new_version): update_node_version_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'update_node_version', None) if callable(update_node_version_function): return update_node_version_function(context, node, old_version, new_version) return False @staticmethod def initialize_type(node_type): initialize_type_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'initialize_type', None) needs_initializing = True if callable(initialize_type_function): needs_initializing = initialize_type_function(node_type) if needs_initializing: node_type.set_metadata(og.MetadataKeys.EXTENSION, "mnaskret.pbdgravity") node_type.set_metadata(og.MetadataKeys.UI_NAME, "PBDGravity") node_type.set_metadata(og.MetadataKeys.DESCRIPTION, "PBDGravity") node_type.set_metadata(og.MetadataKeys.LANGUAGE, "Python") OgnNewNodeDatabase.INTERFACE.add_to_node_type(node_type) @staticmethod def on_connection_type_resolve(node): on_connection_type_resolve_function = getattr(OgnNewNodeDatabase.NODE_TYPE_CLASS, 'on_connection_type_resolve', None) if callable(on_connection_type_resolve_function): on_connection_type_resolve_function(node) NODE_TYPE_CLASS = None GENERATOR_VERSION = (1, 4, 0) TARGET_VERSION = (2, 29, 1) @staticmethod def register(node_type_class): OgnNewNodeDatabase.NODE_TYPE_CLASS = node_type_class 
og.register_node_type(OgnNewNodeDatabase.abi, 1) @staticmethod def deregister(): og.deregister_node_type("mnaskret.pbdgravity.PBDGravity")
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/PBDBasicGravityDatabase.py
"""Support for simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity PBDBasicGravity """ import omni.graph.core as og import traceback import sys import numpy class PBDBasicGravityDatabase(og.Database): """Helper class providing simplified access to data on nodes of type mnresearch.tetgen.PBDBasicGravity Class Members: node: Node being evaluated Attribute Value Properties: Inputs: inputs.edge inputs.edgesRestLengths inputs.elem inputs.gravity inputs.ground inputs.inverseMasses inputs.ks_distance inputs.ks_volume inputs.num_substeps inputs.points inputs.sim_constraints inputs.tetrahedronsRestVolumes inputs.velocities inputs.velocity_dampening Outputs: outputs.points outputs.velocities """ # This is an internal object that provides per-class storage of a per-node data dictionary PER_NODE_DATA = {} # This is an internal object that describes unchanging attributes in a generic way # The values in this list are in no particular order, as a per-attribute tuple # Name, Type, ExtendedTypeIndex, UiName, Description, Metadata, Is_Required, DefaultValue # You should not need to access any of this data directly, use the defined database interfaces INTERFACE = og.Database._get_interface([ ('inputs:edge', 'int2[]', 0, None, 'Input edges', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:edgesRestLengths', 'float[]', 0, None, 'Input edges rest lengths', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:elem', 'int4[]', 0, None, 'Input tetrahedrons', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:gravity', 'vector3f', 0, None, 'Gravity constant', {og.MetadataKeys.DEFAULT: '[0.0, -9.8, 0.0]'}, True, [0.0, -9.8, 0.0]), ('inputs:ground', 'float', 0, None, 'Ground level', {og.MetadataKeys.DEFAULT: '-100.0'}, True, -100.0), ('inputs:inverseMasses', 'float[]', 0, None, 'Inverse masses', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:ks_distance', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:ks_volume', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '1.0'}, True, 1.0), ('inputs:num_substeps', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '8'}, True, 8), ('inputs:points', 'point3f[]', 0, None, 'Input points', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:sim_constraints', 'int', 0, None, '', {og.MetadataKeys.DEFAULT: '1'}, True, 1), ('inputs:tetrahedronsRestVolumes', 'float[]', 0, None, 'Input tetrahedrons rest volumes', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocities', 'vector3f[]', 0, None, 'Input velocities', {og.MetadataKeys.DEFAULT: '[]'}, True, []), ('inputs:velocity_dampening', 'float', 0, None, '', {og.MetadataKeys.DEFAULT: '0.1'}, True, 0.1), ('outputs:points', 'point3f[]', 0, None, 'Output points', {}, True, None), ('outputs:velocities', 'vector3f[]', 0, None, 'Output velocities', {}, True, None), ]) @classmethod def _populate_role_data(cls): """Populate a role structure with the non-default roles on this node type""" role_data = super()._populate_role_data() role_data.inputs.gravity = og.Database.ROLE_VECTOR role_data.inputs.points = og.Database.ROLE_POINT role_data.inputs.velocities = og.Database.ROLE_VECTOR role_data.outputs.points = og.Database.ROLE_POINT role_data.outputs.velocities = og.Database.ROLE_VECTOR return role_data class ValuesForInputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to input attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute 
data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) @property def edge(self): data_view = og.AttributeValueHelper(self._attributes.edge) return data_view.get() @edge.setter def edge(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edge) data_view = og.AttributeValueHelper(self._attributes.edge) data_view.set(value) self.edge_size = data_view.get_array_size() @property def edgesRestLengths(self): data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) return data_view.get() @edgesRestLengths.setter def edgesRestLengths(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.edgesRestLengths) data_view = og.AttributeValueHelper(self._attributes.edgesRestLengths) data_view.set(value) self.edgesRestLengths_size = data_view.get_array_size() @property def elem(self): data_view = og.AttributeValueHelper(self._attributes.elem) return data_view.get() @elem.setter def elem(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.elem) data_view = og.AttributeValueHelper(self._attributes.elem) data_view.set(value) self.elem_size = data_view.get_array_size() @property def gravity(self): data_view = og.AttributeValueHelper(self._attributes.gravity) return data_view.get() @gravity.setter def gravity(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.gravity) data_view = og.AttributeValueHelper(self._attributes.gravity) data_view.set(value) @property def ground(self): data_view = og.AttributeValueHelper(self._attributes.ground) return data_view.get() @ground.setter def ground(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ground) data_view = og.AttributeValueHelper(self._attributes.ground) data_view.set(value) @property def inverseMasses(self): data_view = og.AttributeValueHelper(self._attributes.inverseMasses) return data_view.get() @inverseMasses.setter def inverseMasses(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.inverseMasses) data_view = og.AttributeValueHelper(self._attributes.inverseMasses) data_view.set(value) self.inverseMasses_size = data_view.get_array_size() @property def ks_distance(self): data_view = og.AttributeValueHelper(self._attributes.ks_distance) return data_view.get() @ks_distance.setter def ks_distance(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_distance) data_view = og.AttributeValueHelper(self._attributes.ks_distance) data_view.set(value) @property def ks_volume(self): data_view = og.AttributeValueHelper(self._attributes.ks_volume) return data_view.get() @ks_volume.setter def ks_volume(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.ks_volume) data_view = og.AttributeValueHelper(self._attributes.ks_volume) data_view.set(value) @property def num_substeps(self): data_view = og.AttributeValueHelper(self._attributes.num_substeps) return data_view.get() @num_substeps.setter def num_substeps(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.num_substeps) data_view = og.AttributeValueHelper(self._attributes.num_substeps) data_view.set(value) @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get() @points.setter def points(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.points) data_view = og.AttributeValueHelper(self._attributes.points) 
data_view.set(value) self.points_size = data_view.get_array_size() @property def sim_constraints(self): data_view = og.AttributeValueHelper(self._attributes.sim_constraints) return data_view.get() @sim_constraints.setter def sim_constraints(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.sim_constraints) data_view = og.AttributeValueHelper(self._attributes.sim_constraints) data_view.set(value) @property def tetrahedronsRestVolumes(self): data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) return data_view.get() @tetrahedronsRestVolumes.setter def tetrahedronsRestVolumes(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.tetrahedronsRestVolumes) data_view = og.AttributeValueHelper(self._attributes.tetrahedronsRestVolumes) data_view.set(value) self.tetrahedronsRestVolumes_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get() @velocities.setter def velocities(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocities) data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() @property def velocity_dampening(self): data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) return data_view.get() @velocity_dampening.setter def velocity_dampening(self, value): if self._setting_locked: raise og.ReadOnlyError(self._attributes.velocity_dampening) data_view = og.AttributeValueHelper(self._attributes.velocity_dampening) data_view.set(value) class ValuesForOutputs(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to output attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) self.points_size = None self.velocities_size = None @property def points(self): data_view = og.AttributeValueHelper(self._attributes.points) return data_view.get(reserved_element_count = self.points_size) @points.setter def points(self, value): data_view = og.AttributeValueHelper(self._attributes.points) data_view.set(value) self.points_size = data_view.get_array_size() @property def velocities(self): data_view = og.AttributeValueHelper(self._attributes.velocities) return data_view.get(reserved_element_count = self.velocities_size) @velocities.setter def velocities(self, value): data_view = og.AttributeValueHelper(self._attributes.velocities) data_view.set(value) self.velocities_size = data_view.get_array_size() class ValuesForState(og.DynamicAttributeAccess): """Helper class that creates natural hierarchical access to state attributes""" def __init__(self, node: og.Node, attributes, dynamic_attributes: og.DynamicAttributeInterface): """Initialize simplified access for the attribute data""" context = node.get_graph().get_default_graph_context() super().__init__(context, node, attributes, dynamic_attributes) def __init__(self, node): super().__init__(node) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_INPUT) self.inputs = PBDBasicGravityDatabase.ValuesForInputs(node, self.attributes.inputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_OUTPUT) 
self.outputs = PBDBasicGravityDatabase.ValuesForOutputs(node, self.attributes.outputs, dynamic_attributes) dynamic_attributes = self.dynamic_attribute_data(node, og.AttributePortType.ATTRIBUTE_PORT_TYPE_STATE) self.state = PBDBasicGravityDatabase.ValuesForState(node, self.attributes.state, dynamic_attributes) class abi: """Class defining the ABI interface for the node type""" @staticmethod def get_node_type(): get_node_type_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'get_node_type', None) if callable(get_node_type_function): return get_node_type_function() return 'mnresearch.tetgen.PBDBasicGravity' @staticmethod def compute(context, node): db = PBDBasicGravityDatabase(node) try: db.inputs._setting_locked = True compute_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'compute', None) if callable(compute_function) and compute_function.__code__.co_argcount > 1: return compute_function(context, node) return PBDBasicGravityDatabase.NODE_TYPE_CLASS.compute(db) except Exception as error: stack_trace = "".join(traceback.format_tb(sys.exc_info()[2].tb_next)) db.log_error(f'Assertion raised in compute - {error}\n{stack_trace}', add_context=False) finally: db.inputs._setting_locked = False return False @staticmethod def initialize(context, node): PBDBasicGravityDatabase._initialize_per_node_data(node) # Set any default values the attributes have specified if not node._do_not_use(): db = PBDBasicGravityDatabase(node) db.inputs.edge = [] db.inputs.edgesRestLengths = [] db.inputs.elem = [] db.inputs.gravity = [0.0, -9.8, 0.0] db.inputs.ground = -100.0 db.inputs.inverseMasses = [] db.inputs.ks_distance = 1.0 db.inputs.ks_volume = 1.0 db.inputs.num_substeps = 8 db.inputs.points = [] db.inputs.sim_constraints = 1 db.inputs.tetrahedronsRestVolumes = [] db.inputs.velocities = [] db.inputs.velocity_dampening = 0.1 initialize_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'initialize', None) if callable(initialize_function): initialize_function(context, node) @staticmethod def release(node): release_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'release', None) if callable(release_function): release_function(node) PBDBasicGravityDatabase._release_per_node_data(node) @staticmethod def update_node_version(context, node, old_version, new_version): update_node_version_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'update_node_version', None) if callable(update_node_version_function): return update_node_version_function(context, node, old_version, new_version) return False @staticmethod def initialize_type(node_type): initialize_type_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'initialize_type', None) needs_initializing = True if callable(initialize_type_function): needs_initializing = initialize_type_function(node_type) if needs_initializing: node_type.set_metadata(og.MetadataKeys.EXTENSION, "mnresearch.tetgen") node_type.set_metadata(og.MetadataKeys.UI_NAME, "PBDBasicGravity") node_type.set_metadata(og.MetadataKeys.DESCRIPTION, "PBDBasicGravity") node_type.set_metadata(og.MetadataKeys.LANGUAGE, "Python") PBDBasicGravityDatabase.INTERFACE.add_to_node_type(node_type) @staticmethod def on_connection_type_resolve(node): on_connection_type_resolve_function = getattr(PBDBasicGravityDatabase.NODE_TYPE_CLASS, 'on_connection_type_resolve', None) if callable(on_connection_type_resolve_function): on_connection_type_resolve_function(node) NODE_TYPE_CLASS = None GENERATOR_VERSION = (1, 4, 0) TARGET_VERSION = (2, 29, 1) @staticmethod def 
register(node_type_class): PBDBasicGravityDatabase.NODE_TYPE_CLASS = node_type_class og.register_node_type(PBDBasicGravityDatabase.abi, 1) @staticmethod def deregister(): og.deregister_node_type("mnresearch.tetgen.PBDBasicGravity")
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/nodes/PBDBasicGravity.py
""" This is the implementation of the OGN node defined in OgnNewNode.ogn """ # Array or tuple values are accessed as numpy arrays so you probably need this import import math import numpy as np import warp as wp import omni.timeline from pxr import Usd, UsdGeom, Gf, Sdf @wp.kernel def boundsKer(predictedPositions: wp.array(dtype=wp.vec3), groundLevel: float): tid = wp.tid() x = predictedPositions[tid] if(x[1] < groundLevel): predictedPositions[tid] = wp.vec3(x[0], groundLevel, x[2]) @wp.kernel def PBDStepKer(positions: wp.array(dtype=wp.vec3), predictedPositions: wp.array(dtype=wp.vec3), velocities: wp.array(dtype=wp.vec3), dT: float): tid = wp.tid() x = positions[tid] xPred = predictedPositions[tid] v = (xPred - x)*(1.0/dT) x = xPred positions[tid] = x velocities[tid] = v @wp.kernel def gravityKer(positions: wp.array(dtype=wp.vec3), predictedPositions: wp.array(dtype=wp.vec3), velocities: wp.array(dtype=wp.vec3), gravityConstant: wp.vec3, velocityDampening: float, dt: float): tid = wp.tid() x = positions[tid] v = velocities[tid] velocityDampening = 1.0 - velocityDampening v = v + gravityConstant*dt*velocityDampening xPred = x + v*dt predictedPositions[tid] = xPred @wp.kernel def distanceConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int), edgesA: wp.array(dtype=int), edgesB: wp.array(dtype=int), edgesRestLengths: wp.array(dtype=float), inverseMasses: wp.array(dtype=float), kS: float): tid = wp.tid() edgeIndexA = edgesA[tid] edgeIndexB = edgesB[tid] edgePositionA = predictedPositions[edgeIndexA] edgePositionB = predictedPositions[edgeIndexB] edgeRestLength = edgesRestLengths[tid] dir = edgePositionA - edgePositionB len = wp.length(dir) inverseMass = inverseMasses[edgeIndexA] + inverseMasses[edgeIndexB] edgeDP = (len-edgeRestLength) * wp.normalize(dir) * kS / inverseMass wp.atomic_sub(dP, edgeIndexA, edgeDP) wp.atomic_add(dP, edgeIndexB, edgeDP) wp.atomic_add(constraintsNumber, edgeIndexA, 1) wp.atomic_add(constraintsNumber, edgeIndexB, 1) @wp.kernel def volumeConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int), tetrahedronsA: wp.array(dtype=int), tetrahedronsB: wp.array(dtype=int), tetrahedronsC: wp.array(dtype=int), tetrahedronsD: wp.array(dtype=int), tetrahedronsRestVolumes: wp.array(dtype=float), inverseMasses: wp.array(dtype=float), kS: float): tid = wp.tid() tetrahedronIndexA = tetrahedronsA[tid] tetrahedronIndexB = tetrahedronsB[tid] tetrahedronIndexC = tetrahedronsC[tid] tetrahedronIndexD = tetrahedronsD[tid] tetrahedronPositionA = predictedPositions[tetrahedronIndexA] tetrahedronPositionB = predictedPositions[tetrahedronIndexB] tetrahedronPositionC = predictedPositions[tetrahedronIndexC] tetrahedronPositionD = predictedPositions[tetrahedronIndexD] tetrahedronRestVolume = tetrahedronsRestVolumes[tid] p1 = tetrahedronPositionB - tetrahedronPositionA p2 = tetrahedronPositionC - tetrahedronPositionA p3 = tetrahedronPositionD - tetrahedronPositionA q2 = wp.cross(p3, p1) q1 = wp.cross(p2, p3) q3 = wp.cross(p1, p2) q0 = - q1 - q2 - q3 mA = inverseMasses[tetrahedronIndexA] mB = inverseMasses[tetrahedronIndexB] mC = inverseMasses[tetrahedronIndexC] mD = inverseMasses[tetrahedronIndexD] volume = wp.dot(wp.cross(p1, p2), p3) / 6.0 lambd = mA * wp.dot(q0, q0) + mB * wp.dot(q1, q1) + mC * wp.dot(q2, q2) + mD * wp.dot(q3, q3) lambd = kS * (volume - tetrahedronRestVolume) / lambd wp.atomic_sub(dP, tetrahedronIndexA, q0 * lambd * mA) wp.atomic_sub(dP, 
tetrahedronIndexB, q1 * lambd * mB) wp.atomic_sub(dP, tetrahedronIndexC, q2 * lambd * mC) wp.atomic_sub(dP, tetrahedronIndexD, q3 * lambd * mD) wp.atomic_add(constraintsNumber, tetrahedronIndexA, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexB, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexC, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexD, 1) @wp.kernel def applyConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int)): tid = wp.tid() if(constraintsNumber[tid] > 0): tmpDP = dP[tid] N = float(constraintsNumber[tid]) DP = wp.vec3(tmpDP[0]/N, tmpDP[1]/N, tmpDP[2]/N) predictedPositions[tid] = predictedPositions[tid] + DP dP[tid] = wp.vec3(0.0, 0.0, 0.0) constraintsNumber[tid] = 0 class PBDBasicGravity: @staticmethod def compute(db) -> bool: timeline = omni.timeline.get_timeline_interface() device = "cuda" # # reset on stop # if (timeline.is_stopped()): # context.reset() # initialization if (timeline.is_playing()): with wp.ScopedCudaGuard(): gravity = db.inputs.gravity velocity_dampening = db.inputs.velocity_dampening ground = db.inputs.ground kSDistance = db.inputs.ks_distance kSVolume = db.inputs.ks_volume # convert node inputs to a GPU array positions = wp.array(db.inputs.points, dtype=wp.vec3, device=device) predictedPositions = wp.zeros_like(positions) velocities = wp.array(db.inputs.velocities, dtype=wp.vec3, device=device) inverseMasses = wp.array(db.inputs.inverseMasses, dtype=float, device=device) dP = wp.zeros_like(positions) constraintsNumber = wp.zeros(len(dP), dtype=int, device=device) edgesSplit = np.hsplit(db.inputs.edge, 2) edgesA = wp.array(edgesSplit[0], dtype=int, device=device) edgesB = wp.array(edgesSplit[1], dtype=int, device=device) edgesRestLengths = wp.array(db.inputs.edgesRestLengths, dtype=float, device=device) tetrahedronsSplit = np.hsplit(db.inputs.elem, 4) tetrahedronsA = wp.array(tetrahedronsSplit[0], dtype=int, device=device) tetrahedronsB = wp.array(tetrahedronsSplit[1], dtype=int, device=device) tetrahedronsC = wp.array(tetrahedronsSplit[2], dtype=int, device=device) tetrahedronsD = wp.array(tetrahedronsSplit[3], dtype=int, device=device) tetrahedronsRestVolumes = wp.array(db.inputs.tetrahedronsRestVolumes, dtype=float, device=device) # step simulation with wp.ScopedTimer("Simulate", active=False): # simulate sim_substeps = db.inputs.num_substeps sim_constraints = db.inputs.sim_constraints sim_dt = (1.0/30)/sim_substeps for i in range(sim_substeps): # simulate wp.launch(kernel=gravityKer, dim=len(positions), inputs=[positions, predictedPositions, velocities, gravity, velocity_dampening, sim_dt], device=device) for j in range(sim_constraints): wp.launch( kernel=volumeConstraints, dim=len(tetrahedronsA), inputs=[predictedPositions, dP, constraintsNumber, tetrahedronsA, tetrahedronsB, tetrahedronsC, tetrahedronsD, tetrahedronsRestVolumes, inverseMasses, kSVolume], device=device) wp.launch( kernel=distanceConstraints, dim=len(edgesA), inputs=[predictedPositions, dP, constraintsNumber, edgesA, edgesB, edgesRestLengths, inverseMasses, kSDistance], device=device) wp.launch( kernel=applyConstraints, dim=len(positions), inputs=[predictedPositions, dP, constraintsNumber], device=device) wp.launch(kernel=boundsKer, dim=len(predictedPositions), inputs=[predictedPositions, ground], device=device) wp.launch(kernel=PBDStepKer, dim=len(positions), inputs=[positions, predictedPositions, velocities, sim_dt], device=device) # write node outputs db.outputs.points = positions.numpy() 
db.outputs.velocities = velocities.numpy() else: with wp.ScopedTimer("Write", active=False): # timeline not playing and sim. not yet initialized, just pass through outputs db.outputs.points = db.inputs.points db.outputs.velocities = db.inputs.velocities
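The arithmetic inside distanceConstraints is easier to see for a single edge on the CPU. The sketch below mirrors the kernel's correction on the host (same sign convention: the correction is subtracted from end A and added to end B, after dividing by the summed inverse mass); the function name and the two-particle example are illustrative only.

import numpy as np

def project_distance_constraint(xa, xb, rest_length, wa, wb, ks=1.0):
    """One distance-constraint projection: move both ends so |xa - xb| approaches rest_length."""
    d = xa - xb
    length = np.linalg.norm(d)
    corr = ks * (length - rest_length) / (wa + wb) * (d / length)
    return xa - corr, xb + corr

# Two unit-inverse-mass particles 2.0 apart with rest length 1.0 meet at the rest length
xa, xb = project_distance_constraint(np.array([0., 0., 0.]), np.array([2., 0., 0.]), 1.0, 1.0, 1.0)
print(xa, xb)  # [0.5 0. 0.] [1.5 0. 0.]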
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/nodes/PBDBasicGravity.ogn
{ "PBDBasicGravity": { "version": 1, "description": "PBDBasicGravity", "language": "Python", "metadata": { "uiName": "PBDBasicGravity" }, "inputs": { "points": { "type": "pointf[3][]", "description": "Input points", "default": [] }, "velocities": { "type": "vectorf[3][]", "description": "Input velocities", "default": [] }, "inverseMasses": { "type": "float[]", "description": "Inverse masses", "default": [] }, "edge": { "type": "int[2][]", "description": "Input edges", "default": [] }, "edgesRestLengths": { "type": "float[]", "description": "Input edges rest lengths", "default": [] }, "elem": { "type": "int[4][]", "description": "Input tetrahedrons", "default": [] }, "tetrahedronsRestVolumes": { "type": "float[]", "description": "Input tetrahedrons rest volumes", "default": [] }, "gravity": { "type": "vectorf[3]", "description": "Gravity constant", "default": [0.0, -9.8, 0.0] }, "ground": { "type": "float", "description": "Ground level", "default": -100.0 }, "velocity_dampening": { "type": "float", "description": "", "default": 0.1 }, "ks_distance": { "type": "float", "description": "", "default": 1.0 }, "ks_volume": { "type": "float", "description": "", "default": 1.0 }, "num_substeps": { "type": "int", "description": "", "default": 8 }, "sim_constraints": { "type": "int", "description": "", "default": 1 } }, "outputs": { "points": { "type": "pointf[3][]", "description": "Output points" }, "velocities": { "type": "vectorf[3][]", "description": "Output velocities" } } } }
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/tests/TestPBDBasicGravity.py
import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts import os import carb class TestOgn(ogts.test_case_class(use_schema_prims=True, allow_implicit_graph=False)): async def test_import(self): import mnresearch.tetgen.ogn.PBDBasicGravityDatabase self.assertTrue(hasattr(mnresearch.tetgen.ogn.PBDBasicGravityDatabase, "PBDBasicGravityDatabase")) async def test_usda(self): test_file_name = "PBDBasicGravityTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_mnresearch_tetgen_PBDBasicGravity") self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) self.assertTrue(test_node.get_attribute_exists("inputs:edge")) input_attr = test_node.get_attribute("inputs:edge") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:edge attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:edgesRestLengths")) input_attr = test_node.get_attribute("inputs:edgesRestLengths") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:edgesRestLengths attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:elem")) input_attr = test_node.get_attribute("inputs:elem") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:elem attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:gravity")) input_attr = test_node.get_attribute("inputs:gravity") actual_input = og.Controller.get(input_attr) ogts.verify_values([0.0, -9.8, 0.0], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:gravity attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ground")) input_attr = test_node.get_attribute("inputs:ground") actual_input = og.Controller.get(input_attr) ogts.verify_values(-100.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ground attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:inverseMasses")) input_attr = test_node.get_attribute("inputs:inverseMasses") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:inverseMasses attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ks_distance")) input_attr = test_node.get_attribute("inputs:ks_distance") actual_input = og.Controller.get(input_attr) ogts.verify_values(1.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ks_distance attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ks_volume")) input_attr = test_node.get_attribute("inputs:ks_volume") actual_input = og.Controller.get(input_attr) ogts.verify_values(1.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ks_volume attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:num_substeps")) input_attr = test_node.get_attribute("inputs:num_substeps") actual_input = 
og.Controller.get(input_attr) ogts.verify_values(8, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:num_substeps attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:points")) input_attr = test_node.get_attribute("inputs:points") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:points attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:sim_constraints")) input_attr = test_node.get_attribute("inputs:sim_constraints") actual_input = og.Controller.get(input_attr) ogts.verify_values(1, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:sim_constraints attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:tetrahedronsRestVolumes")) input_attr = test_node.get_attribute("inputs:tetrahedronsRestVolumes") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:tetrahedronsRestVolumes attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:velocities")) input_attr = test_node.get_attribute("inputs:velocities") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:velocities attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:velocity_dampening")) input_attr = test_node.get_attribute("inputs:velocity_dampening") actual_input = og.Controller.get(input_attr) ogts.verify_values(0.1, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:velocity_dampening attribute value error")
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/tests/__init__.py
"""====== GENERATED BY omni.graph.tools - DO NOT EDIT ======""" import omni.graph.tools as ogt ogt.import_tests_in_directory(__file__, __name__)
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/tests/usd/PBDBasicGravityTemplate.usda
#usda 1.0 ( doc ="""Generated from node description file PBDBasicGravity.ogn Contains templates for node types found in that file.""" ) def OmniGraph "TestGraph" { custom token evaluator:type = "push" custom int2 fileFormatVersion = (1, 3) custom token flatCacheBacking = "Shared" custom token pipelineStage = "pipelineStageSimulation" def OmniGraphNode "Template_mnresearch_tetgen_PBDBasicGravity" ( docs="""PBDBasicGravity""" ) { custom token node:type = "PythonNode" custom token PythonNode:type = "mnresearch.tetgen.PBDBasicGravity" custom int node:typeVersion = 1 # 14 attributes custom int2[] inputs:edge = [] ( docs="""Input edges""" ) custom float[] inputs:edgesRestLengths = [] ( docs="""Input edges rest lengths""" ) custom int4[] inputs:elem = [] ( docs="""Input tetrahedrons""" ) custom vector3f inputs:gravity = (0.0, -9.8, 0.0) ( docs="""Gravity constant""" ) custom float inputs:ground = -100.0 ( docs="""Ground level""" ) custom float[] inputs:inverseMasses = [] ( docs="""Inverse masses""" ) custom float inputs:ks_distance = 1.0 ( docs="""No documentation provided""" ) custom float inputs:ks_volume = 1.0 ( docs="""No documentation provided""" ) custom int inputs:num_substeps = 8 ( docs="""No documentation provided""" ) custom point3f[] inputs:points = [] ( docs="""Input points""" ) custom int inputs:sim_constraints = 1 ( docs="""No documentation provided""" ) custom float[] inputs:tetrahedronsRestVolumes = [] ( docs="""Input tetrahedrons rest volumes""" ) custom vector3f[] inputs:velocities = [] ( docs="""Input velocities""" ) custom float inputs:velocity_dampening = 0.1 ( docs="""No documentation provided""" ) # 2 attributes custom point3f[] outputs:points ( docs="""Output points""" ) custom vector3f[] outputs:velocities ( docs="""Output velocities""" ) } }
mnaskret/omni-tetGen/mnresearch/tetgen/ogn/docs/PBDBasicGravity.rst
.. _GENERATED - Documentation _ognmnresearch.tetgen.PBDBasicGravity: OmniGraph Node mnresearch.tetgen.PBDBasicGravity ================================================ mnresearch.tetgen.PBDBasicGravity Properties -------------------------------------------- +---------------------------+-------------------------+ | Name | Value | +===========================+=========================+ | Version | 1 | +---------------------------+-------------------------+ | Extension | mnresearch.tetgen | +---------------------------+-------------------------+ | Has State? | False | +---------------------------+-------------------------+ | Implementation Language | Python | +---------------------------+-------------------------+ | Default Memory Type | cpu | +---------------------------+-------------------------+ | Generated Code Exclusions | None | +---------------------------+-------------------------+ | uiName | PBDBasicGravity | +---------------------------+-------------------------+ | __language | Python | +---------------------------+-------------------------+ | Generated Class Name | PBDBasicGravityDatabase | +---------------------------+-------------------------+ | Python Module | mnresearch.tetgen | +---------------------------+-------------------------+ mnresearch.tetgen.PBDBasicGravity Description --------------------------------------------- PBDBasicGravity mnresearch.tetgen.PBDBasicGravity Inputs ---------------------------------------- +--------------------------------+--------------+------------------+-----------+---------------------------------+ | Name | Type | Default | Required? | Descripton | +================================+==============+==================+===========+=================================+ | inputs:edge | int[2][] | [] | **Y** | Input edges | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:edgesRestLengths | float[] | [] | **Y** | Input edges rest lengths | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:elem | int[4][] | [] | **Y** | Input tetrahedrons | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:gravity | vectorf[3] | [0.0, -9.8, 0.0] | **Y** | Gravity constant | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [0.0, -9.8, 0.0] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ground | float | -100.0 | **Y** | Ground level | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | -100.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:inverseMasses | float[] | [] | **Y** | Inverse masses | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 
[] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ks_distance | float | 1.0 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ks_volume | float | 1.0 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:num_substeps | int | 8 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 8 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:points | pointf[3][] | [] | **Y** | Input points | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:sim_constraints | int | 1 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:tetrahedronsRestVolumes | float[] | [] | **Y** | Input tetrahedrons rest volumes | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:velocities | vectorf[3][] | [] | **Y** | Input velocities | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:velocity_dampening | float | 0.1 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 0.1 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ mnresearch.tetgen.PBDBasicGravity Outputs ----------------------------------------- +--------------------+--------------+---------+-----------+-------------------+ | Name | Type | Default | Required? | Descripton | +====================+==============+=========+===========+===================+ | outputs:points | pointf[3][] | None | **Y** | Output points | +--------------------+--------------+---------+-----------+-------------------+ | outputs:velocities | vectorf[3][] | None | **Y** | Output velocities | +--------------------+--------------+---------+-----------+-------------------+
mnaskret/omni-tetGen/mnresearch/tetgen/nodes/PBDBasicGravity.py
""" This is the implementation of the OGN node defined in OgnNewNode.ogn """ # Array or tuple values are accessed as numpy arrays so you probably need this import import math import numpy as np import warp as wp import omni.timeline from pxr import Usd, UsdGeom, Gf, Sdf @wp.kernel def boundsKer(predictedPositions: wp.array(dtype=wp.vec3), groundLevel: float): tid = wp.tid() x = predictedPositions[tid] if(x[1] < groundLevel): predictedPositions[tid] = wp.vec3(x[0], groundLevel, x[2]) @wp.kernel def PBDStepKer(positions: wp.array(dtype=wp.vec3), predictedPositions: wp.array(dtype=wp.vec3), velocities: wp.array(dtype=wp.vec3), dT: float): tid = wp.tid() x = positions[tid] xPred = predictedPositions[tid] v = (xPred - x)*(1.0/dT) x = xPred positions[tid] = x velocities[tid] = v @wp.kernel def gravityKer(positions: wp.array(dtype=wp.vec3), predictedPositions: wp.array(dtype=wp.vec3), velocities: wp.array(dtype=wp.vec3), gravityConstant: wp.vec3, velocityDampening: float, dt: float): tid = wp.tid() x = positions[tid] v = velocities[tid] velocityDampening = 1.0 - velocityDampening v = v + gravityConstant*dt*velocityDampening xPred = x + v*dt predictedPositions[tid] = xPred @wp.kernel def distanceConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int), edgesA: wp.array(dtype=int), edgesB: wp.array(dtype=int), edgesRestLengths: wp.array(dtype=float), inverseMasses: wp.array(dtype=float), kS: float): tid = wp.tid() edgeIndexA = edgesA[tid] edgeIndexB = edgesB[tid] edgePositionA = predictedPositions[edgeIndexA] edgePositionB = predictedPositions[edgeIndexB] edgeRestLength = edgesRestLengths[tid] dir = edgePositionA - edgePositionB len = wp.length(dir) inverseMass = inverseMasses[edgeIndexA] + inverseMasses[edgeIndexB] edgeDP = (len-edgeRestLength) * wp.normalize(dir) * kS / inverseMass wp.atomic_sub(dP, edgeIndexA, edgeDP) wp.atomic_add(dP, edgeIndexB, edgeDP) wp.atomic_add(constraintsNumber, edgeIndexA, 1) wp.atomic_add(constraintsNumber, edgeIndexB, 1) @wp.kernel def volumeConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int), tetrahedronsA: wp.array(dtype=int), tetrahedronsB: wp.array(dtype=int), tetrahedronsC: wp.array(dtype=int), tetrahedronsD: wp.array(dtype=int), tetrahedronsRestVolumes: wp.array(dtype=float), inverseMasses: wp.array(dtype=float), kS: float): tid = wp.tid() tetrahedronIndexA = tetrahedronsA[tid] tetrahedronIndexB = tetrahedronsB[tid] tetrahedronIndexC = tetrahedronsC[tid] tetrahedronIndexD = tetrahedronsD[tid] tetrahedronPositionA = predictedPositions[tetrahedronIndexA] tetrahedronPositionB = predictedPositions[tetrahedronIndexB] tetrahedronPositionC = predictedPositions[tetrahedronIndexC] tetrahedronPositionD = predictedPositions[tetrahedronIndexD] tetrahedronRestVolume = tetrahedronsRestVolumes[tid] p1 = tetrahedronPositionB - tetrahedronPositionA p2 = tetrahedronPositionC - tetrahedronPositionA p3 = tetrahedronPositionD - tetrahedronPositionA q2 = wp.cross(p3, p1) q1 = wp.cross(p2, p3) q3 = wp.cross(p1, p2) q0 = - q1 - q2 - q3 mA = inverseMasses[tetrahedronIndexA] mB = inverseMasses[tetrahedronIndexB] mC = inverseMasses[tetrahedronIndexC] mD = inverseMasses[tetrahedronIndexD] volume = wp.dot(wp.cross(p1, p2), p3) / 6.0 lambd = mA * wp.dot(q0, q0) + mB * wp.dot(q1, q1) + mC * wp.dot(q2, q2) + mD * wp.dot(q3, q3) lambd = kS * (volume - tetrahedronRestVolume) / lambd wp.atomic_sub(dP, tetrahedronIndexA, q0 * lambd * mA) wp.atomic_sub(dP, 
tetrahedronIndexB, q1 * lambd * mB) wp.atomic_sub(dP, tetrahedronIndexC, q2 * lambd * mC) wp.atomic_sub(dP, tetrahedronIndexD, q3 * lambd * mD) wp.atomic_add(constraintsNumber, tetrahedronIndexA, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexB, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexC, 1) wp.atomic_add(constraintsNumber, tetrahedronIndexD, 1) @wp.kernel def applyConstraints(predictedPositions: wp.array(dtype=wp.vec3), dP: wp.array(dtype=wp.vec3), constraintsNumber: wp.array(dtype=int)): tid = wp.tid() if(constraintsNumber[tid] > 0): tmpDP = dP[tid] N = float(constraintsNumber[tid]) DP = wp.vec3(tmpDP[0]/N, tmpDP[1]/N, tmpDP[2]/N) predictedPositions[tid] = predictedPositions[tid] + DP dP[tid] = wp.vec3(0.0, 0.0, 0.0) constraintsNumber[tid] = 0 class PBDBasicGravity: @staticmethod def compute(db) -> bool: timeline = omni.timeline.get_timeline_interface() device = "cuda" # # reset on stop # if (timeline.is_stopped()): # context.reset() # initialization if (timeline.is_playing()): with wp.ScopedCudaGuard(): gravity = db.inputs.gravity velocity_dampening = db.inputs.velocity_dampening ground = db.inputs.ground kSDistance = db.inputs.ks_distance kSVolume = db.inputs.ks_volume # convert node inputs to a GPU array positions = wp.array(db.inputs.points, dtype=wp.vec3, device=device) predictedPositions = wp.zeros_like(positions) velocities = wp.array(db.inputs.velocities, dtype=wp.vec3, device=device) inverseMasses = wp.array(db.inputs.inverseMasses, dtype=float, device=device) dP = wp.zeros_like(positions) constraintsNumber = wp.zeros(len(dP), dtype=int, device=device) edgesSplit = np.hsplit(db.inputs.edge, 2) edgesA = wp.array(edgesSplit[0], dtype=int, device=device) edgesB = wp.array(edgesSplit[1], dtype=int, device=device) edgesRestLengths = wp.array(db.inputs.edgesRestLengths, dtype=float, device=device) tetrahedronsSplit = np.hsplit(db.inputs.elem, 4) tetrahedronsA = wp.array(tetrahedronsSplit[0], dtype=int, device=device) tetrahedronsB = wp.array(tetrahedronsSplit[1], dtype=int, device=device) tetrahedronsC = wp.array(tetrahedronsSplit[2], dtype=int, device=device) tetrahedronsD = wp.array(tetrahedronsSplit[3], dtype=int, device=device) tetrahedronsRestVolumes = wp.array(db.inputs.tetrahedronsRestVolumes, dtype=float, device=device) # step simulation with wp.ScopedTimer("Simulate", active=False): # simulate sim_substeps = db.inputs.num_substeps sim_constraints = db.inputs.sim_constraints sim_dt = (1.0/30)/sim_substeps for i in range(sim_substeps): # simulate wp.launch(kernel=gravityKer, dim=len(positions), inputs=[positions, predictedPositions, velocities, gravity, velocity_dampening, sim_dt], device=device) for j in range(sim_constraints): wp.launch( kernel=volumeConstraints, dim=len(tetrahedronsA), inputs=[predictedPositions, dP, constraintsNumber, tetrahedronsA, tetrahedronsB, tetrahedronsC, tetrahedronsD, tetrahedronsRestVolumes, inverseMasses, kSVolume], device=device) wp.launch( kernel=distanceConstraints, dim=len(edgesA), inputs=[predictedPositions, dP, constraintsNumber, edgesA, edgesB, edgesRestLengths, inverseMasses, kSDistance], device=device) wp.launch( kernel=applyConstraints, dim=len(positions), inputs=[predictedPositions, dP, constraintsNumber], device=device) wp.launch(kernel=boundsKer, dim=len(predictedPositions), inputs=[predictedPositions, ground], device=device) wp.launch(kernel=PBDStepKer, dim=len(positions), inputs=[positions, predictedPositions, velocities, sim_dt], device=device) # write node outputs db.outputs.points = positions.numpy() 
db.outputs.velocities = velocities.numpy() else: with wp.ScopedTimer("Write", active=False): # timeline not playing and sim. not yet initialized, just pass through outputs db.outputs.points = db.inputs.points db.outputs.velocities = db.inputs.velocities
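# --- Added commentary (not part of the original generated node) ---
# Each substep above: gravityKer integrates gravity (scaled by 1 - velocity_dampening)
# into the velocities and writes predicted positions; the volume and distance
# constraint kernels accumulate corrections in dP together with a per-point
# constraint count; applyConstraints averages and applies those corrections;
# boundsKer clamps predicted positions to the ground plane; finally PBDStepKer
# recovers velocities as (xPred - x) / dT and commits the predicted positions.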
mnaskret/omni-tetGen/mnresearch/tetgen/nodes/__init__.py
""" Dynamically import every file in a directory tree that looks like a Python Ogn Node. This includes linked directories, which is the mechanism by which nodes can be hot-reloaded from the source tree. """ import omni.graph.core as og og.register_ogn_nodes(__file__, "mnresearch.tetgen")
mnaskret/omni-tetGen/mnresearch/tetgen/nodes/PBDBasicGravity.ogn
{ "PBDBasicGravity": { "version": 1, "description": "PBDBasicGravity", "language": "Python", "metadata": { "uiName": "PBDBasicGravity" }, "inputs": { "points": { "type": "pointf[3][]", "description": "Input points", "default": [] }, "velocities": { "type": "vectorf[3][]", "description": "Input velocities", "default": [] }, "inverseMasses": { "type": "float[]", "description": "Inverse masses", "default": [] }, "edge": { "type": "int[2][]", "description": "Input edges", "default": [] }, "edgesRestLengths": { "type": "float[]", "description": "Input edges rest lengths", "default": [] }, "elem": { "type": "int[4][]", "description": "Input tetrahedrons", "default": [] }, "tetrahedronsRestVolumes": { "type": "float[]", "description": "Input tetrahedrons rest volumes", "default": [] }, "gravity": { "type": "vectorf[3]", "description": "Gravity constant", "default": [0.0, -9.8, 0.0] }, "ground": { "type": "float", "description": "Ground level", "default": -100.0 }, "velocity_dampening": { "type": "float", "description": "", "default": 0.1 }, "ks_distance": { "type": "float", "description": "", "default": 1.0 }, "ks_volume": { "type": "float", "description": "", "default": 1.0 }, "num_substeps": { "type": "int", "description": "", "default": 8 }, "sim_constraints": { "type": "int", "description": "", "default": 1 } }, "outputs": { "points": { "type": "pointf[3][]", "description": "Output points" }, "velocities": { "type": "vectorf[3][]", "description": "Output velocities" } } } }
mnaskret/omni-tetGen/mnresearch/tetgen/tests/TestPBDBasicGravity.py
import omni.kit.test import omni.graph.core as og import omni.graph.core.tests as ogts import os import carb class TestOgn(ogts.test_case_class(use_schema_prims=True, allow_implicit_graph=False)): async def test_import(self): import mnresearch.tetgen.ogn.PBDBasicGravityDatabase self.assertTrue(hasattr(mnresearch.tetgen.ogn.PBDBasicGravityDatabase, "PBDBasicGravityDatabase")) async def test_usda(self): test_file_name = "PBDBasicGravityTemplate.usda" usd_path = os.path.join(os.path.dirname(__file__), "usd", test_file_name) if not os.path.exists(usd_path): self.assertTrue(False, f"{usd_path} not found for loading test") (result, error) = await ogts.load_test_file(usd_path) self.assertTrue(result, f'{error} on {usd_path}') test_node = og.Controller.node("/TestGraph/Template_mnresearch_tetgen_PBDBasicGravity") self.assertTrue(test_node.is_valid()) node_type_name = test_node.get_type_name() self.assertEqual(og.GraphRegistry().get_node_type_version(node_type_name), 1) self.assertTrue(test_node.get_attribute_exists("inputs:edge")) input_attr = test_node.get_attribute("inputs:edge") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:edge attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:edgesRestLengths")) input_attr = test_node.get_attribute("inputs:edgesRestLengths") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:edgesRestLengths attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:elem")) input_attr = test_node.get_attribute("inputs:elem") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:elem attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:gravity")) input_attr = test_node.get_attribute("inputs:gravity") actual_input = og.Controller.get(input_attr) ogts.verify_values([0.0, -9.8, 0.0], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:gravity attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ground")) input_attr = test_node.get_attribute("inputs:ground") actual_input = og.Controller.get(input_attr) ogts.verify_values(-100.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ground attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:inverseMasses")) input_attr = test_node.get_attribute("inputs:inverseMasses") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:inverseMasses attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ks_distance")) input_attr = test_node.get_attribute("inputs:ks_distance") actual_input = og.Controller.get(input_attr) ogts.verify_values(1.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ks_distance attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:ks_volume")) input_attr = test_node.get_attribute("inputs:ks_volume") actual_input = og.Controller.get(input_attr) ogts.verify_values(1.0, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:ks_volume attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:num_substeps")) input_attr = test_node.get_attribute("inputs:num_substeps") actual_input = 
og.Controller.get(input_attr) ogts.verify_values(8, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:num_substeps attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:points")) input_attr = test_node.get_attribute("inputs:points") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:points attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:sim_constraints")) input_attr = test_node.get_attribute("inputs:sim_constraints") actual_input = og.Controller.get(input_attr) ogts.verify_values(1, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:sim_constraints attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:tetrahedronsRestVolumes")) input_attr = test_node.get_attribute("inputs:tetrahedronsRestVolumes") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:tetrahedronsRestVolumes attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:velocities")) input_attr = test_node.get_attribute("inputs:velocities") actual_input = og.Controller.get(input_attr) ogts.verify_values([], actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:velocities attribute value error") self.assertTrue(test_node.get_attribute_exists("inputs:velocity_dampening")) input_attr = test_node.get_attribute("inputs:velocity_dampening") actual_input = og.Controller.get(input_attr) ogts.verify_values(0.1, actual_input, "mnresearch.tetgen.PBDBasicGravity USD load test - inputs:velocity_dampening attribute value error")
mnaskret/omni-tetGen/mnresearch/tetgen/tests/__init__.py
"""====== GENERATED BY omni.graph.tools - DO NOT EDIT ======""" import omni.graph.tools as ogt ogt.import_tests_in_directory(__file__, __name__)
mnaskret/omni-tetGen/mnresearch/tetgen/tests/usd/PBDBasicGravityTemplate.usda
#usda 1.0 ( doc ="""Generated from node description file PBDBasicGravity.ogn Contains templates for node types found in that file.""" ) def OmniGraph "TestGraph" { custom token evaluator:type = "push" custom int2 fileFormatVersion = (1, 3) custom token flatCacheBacking = "Shared" custom token pipelineStage = "pipelineStageSimulation" def OmniGraphNode "Template_mnresearch_tetgen_PBDBasicGravity" ( docs="""PBDBasicGravity""" ) { custom token node:type = "PythonNode" custom token PythonNode:type = "mnresearch.tetgen.PBDBasicGravity" custom int node:typeVersion = 1 # 14 attributes custom int2[] inputs:edge = [] ( docs="""Input edges""" ) custom float[] inputs:edgesRestLengths = [] ( docs="""Input edges rest lengths""" ) custom int4[] inputs:elem = [] ( docs="""Input tetrahedrons""" ) custom vector3f inputs:gravity = (0.0, -9.8, 0.0) ( docs="""Gravity constant""" ) custom float inputs:ground = -100.0 ( docs="""Ground level""" ) custom float[] inputs:inverseMasses = [] ( docs="""Inverse masses""" ) custom float inputs:ks_distance = 1.0 ( docs="""No documentation provided""" ) custom float inputs:ks_volume = 1.0 ( docs="""No documentation provided""" ) custom int inputs:num_substeps = 8 ( docs="""No documentation provided""" ) custom point3f[] inputs:points = [] ( docs="""Input points""" ) custom int inputs:sim_constraints = 1 ( docs="""No documentation provided""" ) custom float[] inputs:tetrahedronsRestVolumes = [] ( docs="""Input tetrahedrons rest volumes""" ) custom vector3f[] inputs:velocities = [] ( docs="""Input velocities""" ) custom float inputs:velocity_dampening = 0.1 ( docs="""No documentation provided""" ) # 2 attributes custom point3f[] outputs:points ( docs="""Output points""" ) custom vector3f[] outputs:velocities ( docs="""Output velocities""" ) } }
mnaskret/omni-tetGen/mnresearch/tetgen/docs/PBDBasicGravity.rst
.. _GENERATED - Documentation _ognmnresearch.tetgen.PBDBasicGravity: OmniGraph Node mnresearch.tetgen.PBDBasicGravity ================================================ mnresearch.tetgen.PBDBasicGravity Properties -------------------------------------------- +---------------------------+-------------------------+ | Name | Value | +===========================+=========================+ | Version | 1 | +---------------------------+-------------------------+ | Extension | mnresearch.tetgen | +---------------------------+-------------------------+ | Has State? | False | +---------------------------+-------------------------+ | Implementation Language | Python | +---------------------------+-------------------------+ | Default Memory Type | cpu | +---------------------------+-------------------------+ | Generated Code Exclusions | None | +---------------------------+-------------------------+ | uiName | PBDBasicGravity | +---------------------------+-------------------------+ | __language | Python | +---------------------------+-------------------------+ | Generated Class Name | PBDBasicGravityDatabase | +---------------------------+-------------------------+ | Python Module | mnresearch.tetgen | +---------------------------+-------------------------+ mnresearch.tetgen.PBDBasicGravity Description --------------------------------------------- PBDBasicGravity mnresearch.tetgen.PBDBasicGravity Inputs ---------------------------------------- +--------------------------------+--------------+------------------+-----------+---------------------------------+ | Name | Type | Default | Required? | Descripton | +================================+==============+==================+===========+=================================+ | inputs:edge | int[2][] | [] | **Y** | Input edges | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:edgesRestLengths | float[] | [] | **Y** | Input edges rest lengths | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:elem | int[4][] | [] | **Y** | Input tetrahedrons | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:gravity | vectorf[3] | [0.0, -9.8, 0.0] | **Y** | Gravity constant | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [0.0, -9.8, 0.0] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ground | float | -100.0 | **Y** | Ground level | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | -100.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:inverseMasses | float[] | [] | **Y** | Inverse masses | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 
[] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ks_distance | float | 1.0 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:ks_volume | float | 1.0 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1.0 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:num_substeps | int | 8 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 8 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:points | pointf[3][] | [] | **Y** | Input points | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:sim_constraints | int | 1 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 1 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:tetrahedronsRestVolumes | float[] | [] | **Y** | Input tetrahedrons rest volumes | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:velocities | vectorf[3][] | [] | **Y** | Input velocities | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | [] | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | inputs:velocity_dampening | float | 0.1 | **Y** | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ | | __default | 0.1 | | | +--------------------------------+--------------+------------------+-----------+---------------------------------+ mnresearch.tetgen.PBDBasicGravity Outputs ----------------------------------------- +--------------------+--------------+---------+-----------+-------------------+ | Name | Type | Default | Required? | Descripton | +====================+==============+=========+===========+===================+ | outputs:points | pointf[3][] | None | **Y** | Output points | +--------------------+--------------+---------+-----------+-------------------+ | outputs:velocities | vectorf[3][] | None | **Y** | Output velocities | +--------------------+--------------+---------+-----------+-------------------+
Kim2091/RTXRemixTools/README.md
# RTXRemixTools

These are some tools I've made that are intended for use with Nvidia's RTX Remix. Right now I have 3:

* **MagicUSDA** - Allows you to generate .usda files based on your gameReadyAssets folder
* **LightAdjuster** - A simple script that allows you to adjust light intensity and color temperature in a specified .usda file
* **RemixMeshConvert** - This script will convert meshes to be (more) compatible with Remix

These should hopefully help with setting up mods for Remix quickly and easily.
Kim2091/RTXRemixTools/LightAdjuster/LightAdjuster.py
import argparse


def adjust_value(line, value_name, percentage, log_changes, i):
    if f'float {value_name} =' in line:
        parts = line.split('=')
        old_value = float(parts[1].strip())
        new_value = old_value * percentage
        new_line = f'{parts[0]}= {new_value}\n'
        if log_changes:
            log_line = f'Line {i + 1}: {line.strip()} -> {new_line.strip()}'
            print(log_line)
            with open('changes.log', 'a') as log:
                log.write(log_line + '\n')
        line = new_line
        return line, True
    return line, False


def adjust_file(file_path, start_line=1, log_changes=False, adjust_intensity=False, adjust_color_temperature=False, percentage=None):
    with open(file_path, 'r') as file:
        data = file.readlines()

    lines_changed = 0
    with open(file_path, 'w') as file:
        for i, line in enumerate(data):
            if i + 1 >= start_line:
                if adjust_intensity:
                    line, changed = adjust_value(line, 'intensity', percentage, log_changes, i)
                    if changed:
                        lines_changed += 1
                if adjust_color_temperature:
                    line, changed = adjust_value(line, 'colorTemperature', percentage, log_changes, i)
                    if changed:
                        lines_changed += 1
            file.write(line)

    print(f'Completed! {lines_changed} lines changed.')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Adjust the intensity and/or color temperature values in a file.')
    parser.add_argument('file_path', type=str, help='The path to the file to modify.')
    parser.add_argument('-s', '--start-line', type=int, default=1, help='The line number to start modifying at.')
    parser.add_argument('-l', '--log', action='store_true', help='Whether to print a log of the changed lines.')
    parser.add_argument('-ai', '--adjust-intensity', action='store_true', help='Whether to adjust the intensity value.')
    parser.add_argument('-act', '--adjust-color-temperature', action='store_true', help='Whether to adjust the color temperature value.')
    parser.add_argument('-p', '--percentage', type=float, required=True, help='The percentage to adjust the value by.')
    args = parser.parse_args()

    adjust_file(args.file_path, args.start_line, args.log, args.adjust_intensity, args.adjust_color_temperature, args.percentage)
Kim2091/RTXRemixTools/LightAdjuster/README.md
# **Remix Light Adjuster**

*Written with the assistance of Bing*

This script adjusts the intensity and/or color temperature values in a file.

$\color{#f7d26a}{\textsf{Please back up your usda files before running!}}$

## Usage

To use this script, run the following command:

`python LightAdjuster.py file_path`

where `file_path` is the path to the .usda file to modify.

There are several additional options that can be used with this script:

* `-s` or `--start-line` - This option allows you to specify the line number to start modifying at. The default value is 1.
* `-l` or `--log` - This option enables logging of the changed lines. If this option is used, a log of the changed lines will be printed to the console and written to a file named `changes.log`.
* `-p` or `--percentage` - This option specifies the percentage to adjust the value by. This option is required.
* `-ai` or `--adjust-intensity` - This option enables adjustment of the intensity value using `-p`.
* `-act` or `--adjust-color-temperature` - This option enables adjustment of the color temperature value using `-p`.

For example, to adjust the intensity value in a file named `data.usda`, starting at line 5, and logging the changes, you would run the following command:

`python LightAdjuster.py data.usda -s 5 -l -ai -p 0.5`

This would adjust the intensity value in all lines containing `float intensity =`, starting at line 5, by multiplying it by 0.5. A log of the changed lines would be printed to the console and written to a file named `changes.log`.

## Description

This script reads the specified file and modifies lines that contain either `float intensity =` or `float colorTemperature =`, depending on which value is being adjusted. The value is multiplied by the specified percentage and the line is updated with the new value. If logging is enabled, a log of the changed lines is printed to the console and written to a file named `changes.log`. After all lines have been processed, the script prints a message indicating how many lines were changed.
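As a rough illustration of what the adjustment does (the sample line and numbers below are made up, not taken from a real capture), running with `-ai -p 0.5` rewrites an intensity line like this:

```python
# Minimal sketch of the substitution the script performs (sample line is hypothetical).
line = '        float intensity = 4000'
percentage = 0.5
parts = line.split('=')
print(f'{parts[0]}= {float(parts[1].strip()) * percentage}')
# ->         float intensity = 2000.0
```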
Kim2091/RTXRemixTools/MagicUSDA/requirements.txt
usd-core
Kim2091/RTXRemixTools/MagicUSDA/README.md
# Remix USDA Generator

*Written with the assistance of Bing and ChatGPT*

$\color{#f7d26a}{\textsf{Please back up your usda files to a separate folder before running!}}$

This is a script to generate `.usda` files from your gameReadyAssets folder. It detects any of these map types in your folder:

- emissive
- normal
- metallic
- rough

## Usage

How to use this script:

`python MagicUSDA.py -d path\to\gameReadyAssets`

There are some additional functions:

* `-o` - Change the output usda file names.
* `-m` - Split the output USDA files into separate entries for each map type (e.g. mod_emissive.usda, mod_metallic.usda). Works with `-o` to change the base file name.
* `-a` - Add sublayers made with `-m` to the mod.usda file. Not compatible with custom files specified by `-o`; it will only modify mod.usda. Works with `-m` and `-o`.
* `-g` - Toggle generating hashes for file names before the suffix. Useful for files with generic names like test.dds. Diffuse textures must be identical to Remix dumps.
* `-s` - Change between the AperturePBR_Opacity and AperturePBR_Translucent material shader types. Using this, you can easily generate separate .usda files for normal or translucent objects.
* `-r` _**Currently broken**_ - Specify a separate folder to use as a reference for generating diffuse texture hashes. Searches for files in the reference directory based on file names from the base directory. If not provided, the main directory is used to generate hashes. Useful with folders like captures or game texture rips.

The `.usda` files generated by this script serve to replace textures in your Remix games, allowing you to swap out textures and utilize additional map types to enhance the game's visuals. This script is intended to be used with original diffuse textures, which are required for it to function correctly. It generates a `mod.usda` file for use in your game through Remix. It was designed with [chaiNNer](https://chainner.app/) in mind, however you can use this with any textures you've created.

Be aware that this script will overwrite any pre-existing `mod.usda` files in your directory!
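To make the naming rule concrete, here is a minimal sketch of how a diffuse texture file name maps to the material override the script writes (the hash below is made up; with `-g` the hash is instead recomputed from the DDS payload via xxhash):

```python
# Hypothetical file: textures/0123456789ABCDEF_diffuse.dds (hash is made up).
name = "0123456789ABCDEF_diffuse"
key = name.replace("_diffuse", "").replace("_albedo", "")
# The script defines the override material at this prim path in the generated .usda:
print(f"/RootNode/Looks/mat_{key.upper()}")  # /RootNode/Looks/mat_0123456789ABCDEF
```

Companion maps that share the same base name but use the `_normal`, `_metallic`, `_emissive`, or `_rough` suffix are looked up automatically and attached to that same material.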
Kim2091/RTXRemixTools/MagicUSDA/MagicUSDA.py
import os import argparse import xxhash from pxr import Usd, UsdGeom, UsdShade, Sdf suffixes = ["_normal", "_emissive", "_metallic", "_rough"] def generate_hashes(file_path) -> str: # Read the file and extract the raw data. Thanks @BlueAmulet! with open(file_path, "rb") as file: data = file.read(128) dwHeight = int.from_bytes(data[12:16], "little") dwWidth = int.from_bytes(data[16:20], "little") pfFlags = int.from_bytes(data[80:84], "little") pfFourCC = data[84:88] bitCount = int.from_bytes(data[88:92], "little") mipsize = dwWidth * dwHeight if pfFlags & 0x4: # DDPF_FOURCC if pfFourCC == b"DXT1": # DXT1 is 4bpp mipsize //= 2 elif pfFlags & 0x20242: # DDPF_ALPHA | DDPF_RGB | DDPF_YUV | DDPF_LUMINANCE mipsize = mipsize * bitCount // 8 # Read the required portion of the file for hash calculation with open(file_path, "rb") as file: file.seek(128) # Move the file pointer to the appropriate position data = file.read(mipsize) hash_value = xxhash.xxh3_64(data).hexdigest() return hash_value.upper() def write_usda_file(args, file_list, suffix=None) -> [list, list]: created_files = [] modified_files = [] game_ready_assets_path = os.path.join(args.directory) # Check if there are any texture files with the specified suffix if suffix: has_suffix_files = False for file_name in file_list: if file_name.endswith(f"{suffix}.dds"): has_suffix_files = True break if not has_suffix_files: # return a blank set return [created_files, modified_files] usda_file_name = f'{args.output}{suffix if suffix else ""}.usda' usda_file_path = os.path.join(game_ready_assets_path, usda_file_name) if os.path.exists(usda_file_path): modified_files.append(usda_file_path) else: created_files.append(usda_file_path) targets = {} reference_directory = args.reference_directory if args.reference_directory else args.directory for file_name in file_list: if file_name.endswith(".dds"): # Extract only the file name from the absolute path name = os.path.basename(file_name) name, ext = os.path.splitext(name) if "_" not in name or name.endswith("_diffuse") or name.endswith("_albedo"): # Check if the generate_hashes argument is specified if args.generate_hashes: key = name.split("_")[0] # Use the prefix of the diffuse file name as the key hash_value = generate_hashes(os.path.join(reference_directory, file_name)) # Generate hash for the diffuse file else: key = os.path.basename(name) hash_value = key # Use the original name as the hash value # Check if the key contains a hash or ends with _diffuse or _albedo if not (key.isupper() and len(key) == 16) and not (key.endswith("_diffuse") or key.endswith("_albedo")): continue # Remove the _diffuse or _albedo suffix from the key and hash_value key = key.replace("_diffuse", "").replace("_albedo", "") hash_value = hash_value.replace("_diffuse", "").replace("_albedo", "") # Get the relative path from the game ready assets path to the texture file rel_file_path = os.path.relpath(file_name, args.directory) targets[key] = (rel_file_path, hash_value) # Create a new stage stage = Usd.Stage.CreateNew(usda_file_path) # Modify the existing RootNode prim root_node_prim = stage.OverridePrim("/RootNode") # Add a Looks scope as a child of the RootNode prim looks_scope = UsdGeom.Scope.Define(stage, "/RootNode/Looks") added_targets = set() for value, (rel_file_path, hash_value) in targets.items(): # Check if there is a corresponding texture file for the specified suffix if suffix and not any( file_name.endswith(f"{value}{suffix}.dds") for file_name in file_list ): continue if value in added_targets: continue else: 
added_targets.add(value) print(f"Adding texture {rel_file_path} with hash: {hash_value}") # Add a material prim as a child of the Looks scope material_prim = UsdShade.Material.Define( stage, f"/RootNode/Looks/mat_{hash_value.upper()}" ) material_prim.GetPrim().GetReferences().SetReferences([]) # Set the shader attributes shader_prim = UsdShade.Shader.Define( stage, f"/RootNode/Looks/mat_{hash_value.upper()}/Shader" ) shader_prim.GetPrim().CreateAttribute("info:mdl:sourceAsset", Sdf.ValueTypeNames.Asset).Set( f"{args.shader_type}.mdl" ) shader_prim.GetPrim().CreateAttribute("info:implementationSource", Sdf.ValueTypeNames.Token).Set( "sourceAsset" ) shader_prim.GetPrim().CreateAttribute("info:mdl:sourceAsset:subIdentifier", Sdf.ValueTypeNames.Token).Set( f"{args.shader_type}" ) shader_output = shader_prim.CreateOutput("output", Sdf.ValueTypeNames.Token) if not suffix or suffix == "_diffuse" or suffix == "_albedo": diffuse_texture = shader_prim.CreateInput( "diffuse_texture", Sdf.ValueTypeNames.Asset ) # Use the dynamically generated relative path for the diffuse texture diffuse_texture.Set(f".\{rel_file_path}") # Process each type of texture if not suffix or suffix == "_emissive": emissive_file_name = f"{value}_emissive.dds" # print(f"Emissive File Name: {emissive_file_name in file_list}") # print(file_list) if any(file_path.endswith(emissive_file_name) for file_path in file_list): emissive_mask_texture = shader_prim.CreateInput( "emissive_mask_texture", Sdf.ValueTypeNames.Asset ) # Use the dynamically generated relative path for the emissive texture emissive_rel_file_path = os.path.relpath(os.path.join(os.path.dirname(file_name), emissive_file_name), args.directory) emissive_mask_texture.Set(f".\{emissive_rel_file_path}") enable_emission = shader_prim.CreateInput( "enable_emission", Sdf.ValueTypeNames.Bool ) enable_emission.Set(True) emissive_intensity = shader_prim.CreateInput( "emissive_intensity", Sdf.ValueTypeNames.Float ) emissive_intensity.Set(5) if not suffix or suffix == "_metallic": metallic_file_name = f"{value}_metallic.dds" if any(file_path.endswith(metallic_file_name) for file_path in file_list): metallic_texture = shader_prim.CreateInput( "metallic_texture", Sdf.ValueTypeNames.Asset ) # Use the dynamically generated relative path for the metallic texture metallic_rel_file_path = os.path.relpath(os.path.join(os.path.dirname(file_name), metallic_file_name), args.directory) metallic_texture.Set(f".\{metallic_rel_file_path}") if not suffix or suffix == "_normal": normal_file_name = f"{value}_normal.dds" if any(file_path.endswith(normal_file_name) for file_path in file_list): normalmap_texture = shader_prim.CreateInput( "normal_texture", Sdf.ValueTypeNames.Asset ) # Use the dynamically generated relative path for the normal texture normal_rel_file_path = os.path.relpath(os.path.join(os.path.dirname(file_name), normal_file_name), args.directory) normalmap_texture.Set(f".\{normal_rel_file_path}") if not suffix or suffix == "_rough": roughness_file_name = f"{value}_rough.dds" if any(file_path.endswith(roughness_file_name) for file_path in file_list): reflectionroughness_texture = shader_prim.CreateInput( "reflectionroughness_texture", Sdf.ValueTypeNames.Asset ) # Use the dynamically generated relative path for the roughness texture roughness_rel_file_path = os.path.relpath(os.path.join(os.path.dirname(file_name), roughness_file_name), args.directory) reflectionroughness_texture.Set(f".\{roughness_rel_file_path}") # Connect shader output to material inputs material_prim.CreateInput( 
"mdl:displacement", Sdf.ValueTypeNames.Token ).ConnectToSource(shader_output) material_prim.CreateInput( "mdl:surface", Sdf.ValueTypeNames.Token ).ConnectToSource(shader_output) material_prim.CreateInput( "mdl:volume", Sdf.ValueTypeNames.Token ).ConnectToSource(shader_output) # Save the stage stage.Save() return [modified_files, created_files] def add_sublayers(args, file_list) -> list: modified_files = [] game_ready_assets_path = os.path.join(args.directory) mod_file_path = os.path.join(game_ready_assets_path, "mod.usda") if os.path.exists(mod_file_path): modified_files.append(mod_file_path) # Open the existing stage stage = Usd.Stage.Open(mod_file_path) # Get the existing sublayers existing_sublayers = list(stage.GetRootLayer().subLayerPaths) # Create a set of existing sublayer file names existing_sublayer_files = { os.path.basename(sublayer_path) for sublayer_path in existing_sublayers } # Add new sublayers new_sublayers = [ f"./{args.output}{suffix}.usda" for suffix in suffixes if f"{args.output}{suffix}.usda" not in existing_sublayer_files and any( os.path.basename(file_path) == f"{args.output}{suffix}.usda" for file_path in file_list ) ] stage.GetRootLayer().subLayerPaths = (existing_sublayers + new_sublayers) # Save the stage stage.Save() return modified_files if __name__ == "__main__": # ARGUMENT BLOCK parser = argparse.ArgumentParser() parser.add_argument("-d", "--directory", required=True, help="Path to directory") parser.add_argument("-o", "--output", default="mod", help="Output file name") parser.add_argument("-g", "--generate-hashes", action="store_true", help="Generates hashes for file names before the suffix") parser.add_argument("-m", "--multiple-files", action="store_true", help="Save multiple .usda files, one for each suffix type (except for diffuse)") parser.add_argument("-a", "--add-sublayers", action="store_true", help="Add sublayers made with -m to the mod.usda file. This argument only modifies the mod.usda file and does not affect any custom USDA file specified by the -o argument.") parser.add_argument("-s", "--shader-type", default="AperturePBR_Opacity", choices=["AperturePBR_Opacity", "AperturePBR_Translucent"], help="Shader type") parser.add_argument("-r", "--reference-directory", help="Path to reference directory for diffuse texture hashes") args = parser.parse_args() # Check target processing directory before use if not os.path.isdir(args.directory): raise FileNotFoundError("Specified processing directory (-d) is invalid") # Recursively scan folders file_list = [] for root, dirs, files in os.walk(args.directory): for file in files: file_list.append(os.path.join(root, file)) created_files = [] modified_files = [] # Process sublayer additions print(f"Add Sublayers: {args.add_sublayers}") if args.add_sublayers: modified_files.extend(add_sublayers(args, file_list)) # Generate unique USDA files per suffix type (except diffuse) if args.multiple_files: for suffix in suffixes: m, c = write_usda_file(args, file_list, suffix) modified_files.extend(m), created_files.extend(c) else: # Generate a single USDA file for all suffixes m, c = write_usda_file(args, file_list) modified_files.extend(m), created_files.extend(c) # Complete print("Finished!") print("Created files:") for file in created_files: print(f" - {file}") print("Modified files:") for file in modified_files: print(f" - {file}")
Kim2091/RTXRemixTools/RemixMeshConvert/RemixMeshConvert.py
import argparse import logging import os import shutil import sys from pxr import Usd, UsdGeom, Gf, Sdf ALIASES = { "primvars:UVMap": ("primvars:st", Sdf.ValueTypeNames.Float2Array), "primvars:UVChannel_1": ("primvars:st1", Sdf.ValueTypeNames.Float2Array), "primvars:map1": ("primvars:st1", Sdf.ValueTypeNames.Float2Array), # Add more aliases here } def convert_face_varying_to_vertex_interpolation(usd_file_path): stage = Usd.Stage.Open(usd_file_path) mesh_prims = [prim for prim in stage.TraverseAll() if prim.IsA(UsdGeom.Mesh)] for prim in mesh_prims: mesh = UsdGeom.Mesh(prim) indices = prim.GetAttribute("faceVertexIndices") points = prim.GetAttribute("points") if not indices or not points: continue # Skip if the required attributes are missing points_arr = points.Get() modified_points = [points_arr[i] for i in indices.Get()] points.Set(modified_points) indices.Set([i for i in range(len(indices.Get()))]) mesh.SetNormalsInterpolation(UsdGeom.Tokens.vertex) primvar_api = UsdGeom.PrimvarsAPI(prim) for var in primvar_api.GetPrimvars(): if var.GetInterpolation() == UsdGeom.Tokens.faceVarying: var.SetInterpolation(UsdGeom.Tokens.vertex) # Replace aliases with "float2[] primvars:st" if var.GetName() in ALIASES: new_name, new_type_name = ALIASES[var.GetName()] new_var = primvar_api.GetPrimvar(new_name) if new_var: new_var.Set(var.Get()) else: new_var = primvar_api.CreatePrimvar(new_name, new_type_name) new_var.Set(var.Get()) new_var.SetInterpolation(UsdGeom.Tokens.vertex) # Set interpolation to vertex primvar_api.RemovePrimvar(var.GetBaseName()) return stage def process_folder(input_folder, output_folder, output_extension=None): for file_name in os.listdir(input_folder): input_file = os.path.join(input_folder, file_name) if output_extension: file_name = os.path.splitext(file_name)[0] + '.' + output_extension output_file = os.path.join(output_folder, file_name) if not os.path.isfile(input_file): continue shutil.copy(input_file, output_file) # Make a copy of the input file and rename it to the output file stage = convert_face_varying_to_vertex_interpolation(output_file) stage.Save() # Modify the output file in place logging.info(f"Processed file: {input_file} -> {output_file}") def main(): parser = argparse.ArgumentParser(description='Convert USD file formats and interpolation of meshes.') parser.add_argument('input', type=str, help='Input file or folder path') parser.add_argument('output', type=str, help='Output file or folder path') parser.add_argument('-f', '--format', type=str, choices=['usd', 'usda'], help='Output file format (usd or usda)') args = parser.parse_args() input_path = args.input output_path = args.output output_extension = args.format logging.basicConfig(level=logging.INFO, format='%(message)s') if os.path.isdir(input_path): process_folder(input_path, output_path, output_extension) else: if output_extension: output_path = os.path.splitext(output_path)[0] + '.' + output_extension shutil.copy(input_path, output_path) # Make a copy of the input file and rename it to the output file stage = convert_face_varying_to_vertex_interpolation(output_path) stage.Save() # Modify the output file in place logging.info(f"Processed file: {input_path} -> {output_path}") if __name__ == '__main__': main()
Kim2091/RTXRemixTools/RemixMeshConvert/requirements.txt
usd-core
Kim2091/RTXRemixTools/RemixMeshConvert/README.md
## RemixMeshConvert

$\color{#f7d26a}{\textsf{Use this instead. It integrates directly into Omniverse:}}$ https://github.com/Ekozmaster/NvidiaOmniverseRTXRemixTools

<details>
<summary>Old description:</summary>

*Based on a script originally written by E-man*

$\color{#f7d26a}{\textsf{Please back up your USD and USDA files before running!}}$

**How to use this script:**

To convert a single file: `python RemixMeshConvert.py [input.usda] [output.usda]`

To batch convert a folder: `python RemixMeshConvert.py path\to\input\folder path\to\output\folder -f [usd or usda]`

**Arguments:**

`-f` `--format` - This controls the output format when using the script in **batch** mode

**Description:**

This script takes USD files as input, makes a copy named as the output, converts the interpolation of all meshes in the given USD file from face-varying to vertex, and finally saves the modified stages to the new USD files. It can process a single file or a folder of files, and it also includes a dictionary of aliases for replacing specific primvar names with `float2[] primvars:st` / `st1`.

**For your final exports to use in-game, please save as USD! USDA files are very inefficient in comparison.**

Please refer to `requirements.txt` for necessary Python libraries.

</details>
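For reference, the face-varying to vertex conversion can be pictured on a toy mesh (the data below is made up for illustration): every face-vertex gets its own point and the index buffer becomes a plain 0..N-1 ramp, which is what allows formerly face-varying primvars to be re-tagged with vertex interpolation.

```python
# Toy illustration (made-up data): a quad split into two triangles.
points  = [(0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0)]
indices = [0, 1, 2, 0, 2, 3]                 # 6 face-vertices sharing 4 points

# What the script does to each mesh:
new_points  = [points[i] for i in indices]   # one point per face-vertex
new_indices = list(range(len(indices)))      # [0, 1, 2, 3, 4, 5]

# With one point per face-vertex, faceVarying primvars (UVs, normals) line up
# one-to-one with points and can simply be re-declared as vertex interpolation.
print(len(new_points), new_indices)
```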
Kim2091/RTXRemixTools/RemixMeshConvert/For USD Composer/RemixMeshConvert_OV.py
import omni.usd  # needed for omni.usd.get_context() at the bottom of the script
from pxr import Usd, UsdGeom, Sdf

ALIASES = {
    "primvars:UVMap": ("primvars:st", Sdf.ValueTypeNames.Float2Array),
    "primvars:UVChannel_1": ("primvars:st1", Sdf.ValueTypeNames.Float2Array),
    "primvars:map1": ("primvars:st1", Sdf.ValueTypeNames.Float2Array),
    # Add more aliases here
}


def convert_face_varying_to_vertex_interpolation(stage):
    mesh_prims = [prim for prim in stage.TraverseAll() if prim.IsA(UsdGeom.Mesh)]
    for prim in mesh_prims:
        mesh = UsdGeom.Mesh(prim)
        indices = prim.GetAttribute("faceVertexIndices")
        points = prim.GetAttribute("points")

        if not indices or not points:
            continue  # Skip if the required attributes are missing

        points_arr = points.Get()
        modified_points = [points_arr[i] for i in indices.Get()]
        points.Set(modified_points)

        indices.Set([i for i in range(len(indices.Get()))])

        mesh.SetNormalsInterpolation(UsdGeom.Tokens.vertex)

        primvar_api = UsdGeom.PrimvarsAPI(prim)
        for var in primvar_api.GetPrimvars():
            if var.GetInterpolation() == UsdGeom.Tokens.faceVarying:
                var.SetInterpolation(UsdGeom.Tokens.vertex)

            # Replace aliases with "float2[] primvars:st"
            if var.GetName() in ALIASES:
                new_name, new_type_name = ALIASES[var.GetName()]
                new_var = primvar_api.GetPrimvar(new_name)
                if new_var:
                    new_var.Set(var.Get())
                else:
                    new_var = primvar_api.CreatePrimvar(new_name, new_type_name)
                    new_var.Set(var.Get())
                new_var.SetInterpolation(UsdGeom.Tokens.vertex)  # Set interpolation to vertex

                # Remove the old primvar directly from the UsdGeomPrimvar object
                var.GetAttr().Block()

    return stage


stage = omni.usd.get_context().get_stage()
convert_face_varying_to_vertex_interpolation(stage)
Kim2091/RTXRemixTools/RemixMeshConvert/For USD Composer/README.md
## RemixMeshConvert

*Based on a script originally written by E-man*

$\color{#f7d26a}{\textsf{Please back up your USD and USDA files before running!}}$

**How to use this script:**

* Install USD Composer: https://www.nvidia.com/en-us/omniverse/apps/create/
* Once launched, open the Script Editor in Window > Script Editor
* Load your mesh files by dragging them into the pane on the right
* Run the script

For more information, look at [this thread](https://discord.com/channels/1028444667789967381/1096847508002590760/1123306156773879928) in the [RTX Remix Showcase server](https://discord.gg/rtxremix)

**Description:**

The RemixMeshConvert_OV script is only for usage within Omniverse's USD Composer. If you want to process files and folders independently of Omniverse, use the RemixMeshConvert script in the directory above this one.

**For your final exports to use in-game, please save as USD! USDA files are very inefficient in comparison.**
gigwegbe/synthetic_data_with_nvidia_replicator_and_edge_impulse/objects_position_normal_90.py
import omni.replicator.core as rep

with rep.new_layer():

    # Load in assets
    local_path = "/home/george/Documents/synthetic_data_with_nvidia_replicator_and_edge_impulse/"
    TABLE_USD = f"{local_path}/asset/Collected_EastRural_Table/EastRural_Table.usd"
    SPOON_SMALL_USD = f"{local_path}/asset/Collected_Spoon_Small/Spoon_Small.usd"
    SPOON_BIG_USD = f"{local_path}/asset/Collected_Spoon_Big/Spoon_Big.usd"
    FORK_SMALL_USD = f"{local_path}/asset/Collected_Fork_Small/Fork_Small.usd"
    FORK_BIG_USD = f"{local_path}/asset/Collected_Fork_Big/Fork_Big.usd"
    KNIFE_USD = f"{local_path}/asset/Collected_Knife/Knife.usd"

    # Camera parameters
    cam_position = (46, 200, 25)
    cam_position2 = (46, 120, 25)
    cam_position_random = rep.distribution.uniform((0, 181, 0), (0, 300, 0))
    cam_rotation = (-90, 0, 0)
    focus_distance = 114
    focus_distance2 = 39.1
    focal_length = 27
    focal_length2 = 18.5
    f_stop = 1.8
    f_stop2 = 1.8
    focus_distance_random = rep.distribution.normal(500.0, 100)

    # Cutlery path
    current_cultery = SPOON_SMALL_USD  # Change the item here, e.g. KNIFE_USD
    output_path = current_cultery.split(".")[0].split("/")[-1]

    def rect_lights(num=1):
        lights = rep.create.light(
            light_type="rect",
            temperature=rep.distribution.normal(6500, 500),
            intensity=rep.distribution.normal(0, 5000),
            position=(45, 110, 0),
            rotation=(-90, 0, 0),
            scale=rep.distribution.uniform(50, 100),
            count=num
        )
        return lights.node

    def dome_lights(num=3):
        lights = rep.create.light(
            light_type="dome",
            temperature=rep.distribution.normal(6500, 500),
            intensity=rep.distribution.normal(0, 1000),
            position=(45, 120, 18),
            rotation=(225, 0, 0),
            count=num
        )
        return lights.node

    def table():
        table = rep.create.from_usd(TABLE_USD, semantics=[('class', 'table')])
        with table:
            rep.modify.pose(
                position=(46, -0.0, 20),
                rotation=(0, -90, -90),
            )
        return table

    # Define the randomizer function for the cutlery assets. This randomization includes
    # placement and rotation of the assets on the table surface.
    def cutlery_props(size=15):
        instances = rep.randomizer.instantiate(rep.utils.get_usd_files(
            current_cultery), size=size, mode='point_instance')
        with instances:
            rep.modify.pose(
                position=rep.distribution.uniform(
                    (0, 76.3651, 0), (90, 76.3651, 42)),
                rotation=rep.distribution.uniform(
                    (-90, -180, 0), (-90, 180, 0)),
            )
        return instances.node

    # Register randomizers
    rep.randomizer.register(table)
    rep.randomizer.register(cutlery_props)
    rep.randomizer.register(rect_lights)
    rep.randomizer.register(dome_lights)

    # Set up multiple cameras and attach them to render products
    camera = rep.create.camera(focus_distance=focus_distance, focal_length=focal_length,
                               position=cam_position, rotation=cam_rotation, f_stop=f_stop)
    camera2 = rep.create.camera(focus_distance=focus_distance2, focal_length=focal_length2,
                                position=cam_position2, rotation=cam_rotation, f_stop=f_stop2)

    # Will render 1024x1024 images and 512x512 images
    render_product = rep.create.render_product(camera, (1024, 1024))
    render_product2 = rep.create.render_product(camera2, (512, 512))

    # Initialize and attach writer
    writer = rep.WriterRegistry.get("BasicWriter")
    writer.initialize(output_dir=f"{local_path}/data/normal/{output_path}",
                      rgb=True, bounding_box_2d_tight=False, semantic_segmentation=False)
    writer.attach([render_product, render_product2])

    with rep.trigger.on_frame(num_frames=50):
        rep.randomizer.table()
        rep.randomizer.rect_lights(1)
        rep.randomizer.dome_lights(1)
        rep.randomizer.cutlery_props(5)

    # Run the simulation graph
    rep.orchestrator.run()
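# Note (not part of the original script): while iterating on randomizer settings it can be
# useful to preview the graph in the viewport instead of writing a full dataset. Assuming a
# recent omni.replicator.core, calling
#     rep.orchestrator.preview()
# in place of rep.orchestrator.run() executes the graph once without writing output files.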
gigwegbe/synthetic_data_with_nvidia_replicator_and_edge_impulse/objects_position_random.py
import omni.replicator.core as rep

with rep.new_layer():

    # Load in assets
    local_path = "/home/george/Documents/synthetic_data_with_nvidia_replicator_and_edge_impulse/"
    TABLE_USD = f"{local_path}/asset/Collected_EastRural_Table/EastRural_Table.usd"
    SPOON_SMALL_USD = f"{local_path}/asset/Collected_Spoon_Small/Spoon_Small.usd"
    SPOON_BIG_USD = f"{local_path}/asset/Collected_Spoon_Big/Spoon_Big.usd"
    FORK_SMALL_USD = f"{local_path}/asset/Collected_Fork_Small/Fork_Small.usd"
    FORK_BIG_USD = f"{local_path}/asset/Collected_Fork_Big/Fork_Big.usd"
    KNIFE_USD = f"{local_path}/asset/Collected_Knife/Knife.usd"

    # Camera parameters
    cam_position = (46, 200, 25)
    cam_position2 = (46, 120, 25)
    cam_position_random = rep.distribution.uniform((0, 181, 0), (0, 300, 0))
    cam_rotation = (-90, 0, 0)
    focus_distance = 114
    focus_distance2 = 39.1
    focal_length = 27
    focal_length2 = 18.5
    f_stop = 1.8
    f_stop2 = 1.8
    focus_distance_random = rep.distribution.normal(500.0, 100)

    # Cutlery path
    current_cultery = SPOON_SMALL_USD  # Change the item here, e.g. KNIFE_USD
    output_path = current_cultery.split(".")[0].split("/")[-1]

    def rect_lights(num=1):
        lights = rep.create.light(
            light_type="rect",
            temperature=rep.distribution.normal(6500, 500),
            intensity=rep.distribution.normal(0, 5000),
            position=(45, 110, 0),
            rotation=(-90, 0, 0),
            scale=rep.distribution.uniform(50, 100),
            count=num
        )
        return lights.node

    def dome_lights(num=3):
        lights = rep.create.light(
            light_type="dome",
            temperature=rep.distribution.normal(6500, 500),
            intensity=rep.distribution.normal(0, 1000),
            position=(45, 120, 18),
            rotation=(225, 0, 0),
            count=num
        )
        return lights.node

    def table():
        table = rep.create.from_usd(TABLE_USD, semantics=[('class', 'table')])
        with table:
            rep.modify.pose(
                position=(46, -0.0, 20),
                rotation=(0, -90, -90),
            )
        return table

    # Define the randomizer function for the cutlery assets. This randomization includes
    # placement and rotation of the assets on the table surface.
    def cutlery_props(size=15):
        instances = rep.randomizer.instantiate(rep.utils.get_usd_files(
            current_cultery), size=size, mode='point_instance')
        with instances:
            rep.modify.pose(
                position=rep.distribution.uniform(
                    (0, 86.3651, 0), (90, 86.3651, 42)),
                rotation=rep.distribution.uniform(
                    (-90, -180, -90), (90, 180, 90)),
            )
        return instances.node

    # Register randomizers
    rep.randomizer.register(table)
    rep.randomizer.register(cutlery_props)
    rep.randomizer.register(rect_lights)
    rep.randomizer.register(dome_lights)

    # Set up multiple cameras and attach them to render products
    camera = rep.create.camera(focus_distance=focus_distance, focal_length=focal_length,
                               position=cam_position, rotation=cam_rotation, f_stop=f_stop)
    camera2 = rep.create.camera(focus_distance=focus_distance2, focal_length=focal_length2,
                                position=cam_position2, rotation=cam_rotation, f_stop=f_stop2)

    # Will render 1024x1024 images and 512x512 images
    render_product = rep.create.render_product(camera, (1024, 1024))
    render_product2 = rep.create.render_product(camera2, (512, 512))

    # Initialize and attach writer
    writer = rep.WriterRegistry.get("BasicWriter")
    writer.initialize(output_dir=f"{local_path}/data/random/{output_path}",
                      rgb=True, bounding_box_2d_tight=False, semantic_segmentation=False)
    writer.attach([render_product, render_product2])

    with rep.trigger.on_frame(num_frames=25):
        rep.randomizer.table()
        rep.randomizer.rect_lights(1)
        rep.randomizer.dome_lights(1)
        rep.randomizer.cutlery_props(5)

    # Run the simulation graph
    rep.orchestrator.run()