diff --git a/.env_sample b/.env_sample
index 599d8e0..592b593 100644
--- a/.env_sample
+++ b/.env_sample
@@ -1,23 +1,40 @@
 LOCAL_WAR=./dataverse.war
-#COMPOSE_FILE=./docker-compose.yml
+COMPOSE_FILE=distros/dataverse.no/docker-compose.yaml
+CONFIGURATION_PATH=/distrib/private
+DOCROOT=/distrib
+LOGS_PATH=/distrib/private/logs
+DOCKER_HUB=dockerhub/dataverseno
+VERSION=5.13.no
+#DOCKER_HUB=coronawhy
+SECRETS_DIR="${CONFIGURATION_PATH}/secrets"
+POSTGRESTMP=/mnt/tmp/postgres
-# Activate Dataverse language pack by setting language code:
-# en - English hu - Hungarian fr - French sl - Slovenian
-# se - Swedish es - Spanish it - Italian ua - Ukrainian
-# pt - Portuguese ru - Russian at - Austrian German
-# br - Brazilian Portuguese ca - French Canadian
-#MAINLANG=en
+#dataverse
+WEBANALYTICSON=true
+TESTBANNER=true
+BASEURL="https://....blob.core.windows.net/data1"
+KEYWINDOWSBLOB=key
 # Dataverse database settings
 DATAVERSE_DB_HOST=postgres
 DATAVERSE_DB_USER=dataverse
 DATAVERSE_DB_PASSWORD=dvnsecret
 DATAVERSE_DB_NAME=dataverse
+PASSWORD_FILE=/secrets/asadminpwd
+
+
+# Solr
 SOLR_SERVICE_HOST=solr:8983
 SOLR_SERVICE_PORT=8983
 DATAVERSE_URL=localhost:8080
 DATAVERSE_SERVICE_HOST=localhost
+LOCAL_STORAGE=/mntblob
+
+# Counter Processor
+COUNTERPROSVERSION=0.1.04
+GEOIPLICENSE=Licence
+CONFIG_FILE=counter-processor-config.yaml
 # Postgres settings
 POSTGRES_USER=dataverse
@@ -27,15 +44,10 @@ POSTGRES_DATABASE=dataverse
 POSTGRES_DB=dataverse
 # Domain configuration and init folder
-#hostname=www.yourdataverse.org
-hostname=locahost:8080
-#traefikhost=www.yourdataverse.org
-traefikhost=localhost:8080
+hostname=dataverse.no
+traefikhost=dataverse.no
 INIT_SCRIPTS_FOLDER=/opt/payara/init.d
-# traefik email settings
-useremail=youremail@domain.com
-
 # Webhook configuration to bundle external services
 WEBHOOK=/opt/payara/triggers/external-services.py
 #CESSDA=True
@@ -43,44 +55,43 @@ WEBHOOK=/opt/payara/triggers/external-services.py
 # DOI parameters
 # https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring
-#doi_authority=doi_authority
-#doi_provider=doi_provider
-#doi_shoulder=doi_shoulder
-#doi_username=doi_username
-#doi_password=doi_password
-dataciterestapiurlstring=https\\:\/\/api.test.datacite.org
+doi_authority=10.21337
+doi_provider=DataCite
+doi_username=doiusername
+doi_password=doipassword
+dataciterestapiurlstring=https\:\/\/api.test.datacite.org
 baseurlstring=https\:\/\/mds.test.datacite.org
+
 # AWS settings
 # https://guides.dataverse.org/en/latest/installation/config.html#id90
-#aws_bucket_name=aws_bucket_name
-#aws_s3_profile=aws_s3_profile
-#aws_endpoint_url=aws_endpoint_url
+aws_config=/secrets/aws-cli/.aws/cloudian
+aws_bucket_name=awsbucketname
+aws_s3_profile=cloudian
+aws_endpoint=s3-oslo.educloud.no
+aws_endpoint_url=https\:\/\/${aws_endpoint}
+#aws_endpoint_region=oslo
+
+# AWS UiT
+aws_uit_bucket_name=awsbucketname2
+aws_uit_s3_profile=uit
+#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no
-# Mail relay
+
+# Mail
 # https://guides.dataverse.org/en/latest/developers/troubleshooting.html
-#system_email=system_email
-#mailhost=mailhost
-#mailuser=mailuser
-#no_reply_email=no_reply_email
-#smtp_password=smtp_password
-#smtp_port=smtp_port
-#socket_port=socket_port
+system_email=""
+mailhost=smtp-relay.exemple.dataverse
+mailuser="DataverseNO "
+no_reply_email=no-reply@dataverse.no
+smtp_password=smtppassword
+smtp_port=465
+socket_port=465
+support_email="DataverseNO "
 # Federated authentification file
 #
https://guides.dataverse.org/en/latest/installation/shibboleth.html -#federated_json_file=federated_json_file - -# MinIO bucket 1 -# https://guides.dataverse.org/en/latest/installation/config.html#id87 -#bucketname_1=bucketname_1 -#minio_label_1=minio_label_1 -#minio_bucket_1=minio_bucket_1 -#minio_profile_1=minio_profile_1 - -# MinIO bucket 2 -# https://guides.dataverse.org/en/latest/installation/config.html#id87 -#bucketname_2=bucketname_2 -#minio_label_1=minio_label_2 -#minio_bucket_1=minio_bucket_2 -#minio_profile_1=minio_profile_2 +federated_json_file=/secrets/openid.json +azure_json_file=/secrets/azopenid.json +orcid_json_file=/secrets/orcid-member.json + diff --git a/.gitignore b/.gitignore index 71384b4..f6fb909 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,7 @@ dataverse.war +.env +.env.2* +.gitignore #Ignoring IDE files .idea @@ -7,3 +10,479 @@ dataverse.war #Ignoring letsencrpt folders for SSL letsencrypt letsencrypt/* + +distros/dataverse.no/init.d/preboot.payara + +# Created by https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java +# Edit at https://www.toptal.com/developers/gitignore?templates=visualstudio,visualstudiocode,java + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +### VisualStudio ### +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. +## +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.log +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# 
AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. +!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio 6 auto-generated project file (contains which files were open etc.) 
+*.vbp + +# Visual Studio 6 workspace and project file (working project files containing files to include in project) +*.dsw +*.dsp + +# Visual Studio 6 technical files + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# Visual Studio History (VSHistory) files +.vshistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code files for those working on multiple tools +*.code-workspace + +# Local History for Visual Studio Code + +# Windows Installer files from build outputs +*.cab +*.msi +*.msix +*.msm +*.msp + +# JetBrains Rider +*.sln.iml + +### VisualStudio Patch ### +# Additional files built by Visual Studio + + +### Java ### +# Compiled class file +*.class + +# Log file +*.log + +# BlueJ files +*.ctxt + +# Mobile Tools for Java (J2ME) +.mtj.tmp/ + +# Package Files # +*.jar +*.war +*.nar +*.ear +*.zip +*.tar.gz +*.rar + +# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml +hs_err_pid* +replay_pid* + +# End of https://www.toptal.com/developers/gitignore/api/visualstudio,visualstudiocode,java + +# Created by https://www.toptal.com/developers/gitignore/api/vim +# Edit at https://www.toptal.com/developers/gitignore?templates=vim + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +# End of https://www.toptal.com/developers/gitignore/api/vim + +distros/dataverse.no/init.d/preboot.payara diff --git a/distros/dataverse.no/configs/backup-http-ssl.conf b/distros/dataverse.no/configs/backup-http-ssl.conf new file mode 100755 index 0000000..19247ee --- /dev/null +++ b/distros/dataverse.no/configs/backup-http-ssl.conf @@ -0,0 +1,294 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. 
+SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test-docker.dataverse.no + + SSLProxyEngine on + ProxyPass / https://test-docker.dataverse.no:443/ + ProxyPassReverse / https://test-docker.dataverse.no:443/ + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test-docker.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. +ErrorLog /dev/stdout +TransferLog /dev/stdout +LoadModule dumpio_module modules/mod_dumpio.so + + DumpIOInput On + DumpIOOutput On + +LogLevel dumpio:trace7 + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. 
+# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +#ProxyPass / ajp://dataverse:8009 +#ProxyPassReverse / ajp://dataverse:8009 + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." 
\ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. 
+#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/backup.http-ssl.conf b/distros/dataverse.no/configs/backup.http-ssl.conf new file mode 100755 index 0000000..414970c --- /dev/null +++ b/distros/dataverse.no/configs/backup.http-ssl.conf @@ -0,0 +1,287 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test-docker.dataverse.no + + SSLProxyEngine on + ProxyPass / https://test-docker.dataverse.no:443/ + ProxyPassReverse / https://test-docker.dataverse.no:443/ + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test-docker.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. 
+ErrorLog /dev/stdout +TransferLog /dev/stdout +LogLevel warn + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + +# +# Used for example style sheet in error templates. 
+# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +#ProxyPass / ajp://dataverse:8009 +#ProxyPassReverse / ajp://dataverse:8009 + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. 
a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +CustomLog /dev/stdout \ + "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/citation.tsv b/distros/dataverse.no/configs/citation.tsv new file mode 100644 index 0000000..18bc31c --- /dev/null +++ b/distros/dataverse.no/configs/citation.tsv @@ -0,0 +1,326 @@ +#metadataBlock name dataverseAlias displayName blockURI + citation Citation Metadata https://dataverse.org/schema/citation/ +#datasetField name title description watermark fieldType displayOrder displayFormat advancedSearchField allowControlledVocabulary allowmultiples facetable displayoncreate required parent metadatablock_id termURI + title Title The main title of the Dataset text 0 TRUE FALSE FALSE FALSE TRUE TRUE citation http://purl.org/dc/terms/title + subtitle Subtitle A secondary title that amplifies or states certain limitations on the main title text 1 FALSE FALSE FALSE FALSE FALSE FALSE citation + alternativeTitle Alternative Title Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title text 2 FALSE FALSE FALSE FALSE FALSE FALSE citation http://purl.org/dc/terms/alternative + alternativeURL Alternative URL Another URL where one can view or access the data in the Dataset, e.g. a project or personal webpage https:// url 3 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE citation https://schema.org/distribution + otherId Other Identifier Another unique identifier for the Dataset (e.g. producer's or another repository's identifier) none 4 : FALSE FALSE TRUE FALSE FALSE FALSE citation + otherIdAgency Agency The name of the agency that generated the other identifier text 5 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + otherIdValue Identifier Another identifier uniquely identifies the Dataset text 6 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE otherId citation + author Author The entity, e.g. a person or organization, that created the Dataset none 7 FALSE FALSE TRUE FALSE TRUE TRUE citation http://purl.org/dc/terms/creator + authorName Name The name of the author, such as the person's name or the name of an organization 1) Family Name, Given Name or 2) Organization XYZ text 8 #VALUE TRUE FALSE FALSE TRUE TRUE TRUE author citation + authorAffiliation Affiliation The name of the entity affiliated with the author, e.g. 
an organization's name Organization XYZ text 9 (#VALUE) TRUE FALSE FALSE TRUE TRUE FALSE author citation + authorIdentifierScheme Identifier Type The type of identifier that uniquely identifies the author (e.g. ORCID, ISNI) text 10 - #VALUE: FALSE TRUE FALSE FALSE TRUE FALSE author citation http://purl.org/spar/datacite/AgentIdentifierScheme + authorIdentifier Identifier Uniquely identifies the author when paired with an identifier type text 11 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE author citation http://purl.org/spar/datacite/AgentIdentifier + datasetContact Point of Contact The entity, e.g. a person or organization, that users of the Dataset can contact with questions none 12 FALSE FALSE TRUE FALSE TRUE TRUE citation + datasetContactName Name The name of the point of contact, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 13 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactAffiliation Affiliation The name of the entity affiliated with the point of contact, e.g. an organization's name Organization XYZ text 14 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE datasetContact citation + datasetContactEmail E-mail The point of contact's email address name@email.xyz email 15 #EMAIL FALSE FALSE FALSE FALSE TRUE TRUE datasetContact citation + dsDescription Description A summary describing the purpose, nature, and scope of the Dataset none 16 FALSE FALSE TRUE FALSE TRUE TRUE citation + dsDescriptionValue Text A summary describing the purpose, nature, and scope of the Dataset textbox 17 #VALUE TRUE FALSE FALSE FALSE TRUE TRUE dsDescription citation + dsDescriptionDate Date The date when the description was added to the Dataset. If the Dataset contains more than one description, e.g. the data producer supplied one description and the data repository supplied another, this date is used to distinguish between the descriptions YYYY-MM-DD date 18 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE dsDescription citation + subject Subject The area of study relevant to the Dataset text 19 TRUE TRUE TRUE TRUE TRUE TRUE citation http://purl.org/dc/terms/subject + keyword Keyword A key term that describes an important aspect of the Dataset and information about any controlled vocabulary used none 20 FALSE FALSE TRUE FALSE TRUE FALSE citation + keywordValue Term A key term that describes important aspects of the Dataset text 21 #VALUE TRUE FALSE FALSE TRUE TRUE FALSE keyword citation + keywordVocabulary Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. LCSH, MeSH) text 22 (#VALUE) FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + keywordVocabularyURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 23 #VALUE FALSE FALSE FALSE FALSE TRUE FALSE keyword citation + topicClassification Topic Classification Indicates a broad, important topic or subject that the Dataset covers and information about any controlled vocabulary used none 24 FALSE FALSE TRUE FALSE FALSE FALSE citation + topicClassValue Term A topic or subject term text 25 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE topicClassification citation + topicClassVocab Controlled Vocabulary Name The controlled vocabulary used for the keyword term (e.g. 
LCSH, MeSH) text 26 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + topicClassVocabURI Controlled Vocabulary URL The URL where one can access information about the term's controlled vocabulary https:// url 27 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE topicClassification citation + publication Related Publication The article or report that uses the data in the Dataset. The full list of related publications will be displayed on the metadata tab none 28 FALSE FALSE TRUE FALSE TRUE FALSE citation http://purl.org/dc/terms/isReferencedBy + publicationCitation Citation The full bibliographic citation for the related publication textbox 29 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/dc/terms/bibliographicCitation + publicationIDType Identifier Type The type of identifier that uniquely identifies a related publication text 30 #VALUE: TRUE TRUE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifierScheme + publicationIDNumber Identifier The identifier for a related publication text 31 #VALUE TRUE FALSE FALSE FALSE TRUE FALSE publication citation http://purl.org/spar/datacite/ResourceIdentifier + publicationURL URL The URL form of the identifier entered in the Identifier field, e.g. the DOI URL if a DOI was entered in the Identifier field. Used to display what was entered in the ID Type and ID Number fields as a link. If what was entered in the Identifier field has no URL form, the URL of the publication webpage is used, e.g. a journal article webpage https:// url 32 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE publication citation https://schema.org/distribution + notesText Notes Additional information about the Dataset textbox 33 FALSE FALSE FALSE FALSE TRUE FALSE citation + language Language A language that the Dataset's files is written in text 34 TRUE TRUE TRUE TRUE FALSE FALSE citation http://purl.org/dc/terms/language + producer Producer The entity, such a person or organization, managing the finances or other administrative processes involved in the creation of the Dataset none 35 FALSE FALSE TRUE FALSE FALSE FALSE citation + producerName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 36 #VALUE TRUE FALSE FALSE TRUE FALSE TRUE producer citation + producerAffiliation Affiliation The name of the entity affiliated with the producer, e.g. an organization's name Organization XYZ text 37 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerAbbreviation Abbreviated Name The producer's abbreviated name (e.g. IQSS, ICPSR) text 38 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerURL URL The URL of the producer's website https:// url 39 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE producer citation + producerLogoURL Logo URL The URL of the producer's logo https:// url 40
FALSE FALSE FALSE FALSE FALSE FALSE producer citation + productionDate Production Date The date when the data were produced (not distributed, published, or archived) YYYY-MM-DD date 41 TRUE FALSE FALSE TRUE FALSE FALSE citation + productionPlace Production Location The location where the data and any related materials were produced or collected text 42 TRUE FALSE TRUE TRUE FALSE FALSE citation + contributor Contributor The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset none 43 : FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/contributor + contributorType Type Indicates the type of contribution made to the dataset text 44 #VALUE TRUE TRUE FALSE TRUE FALSE FALSE contributor citation + contributorName Name The name of the contributor, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 45 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE contributor citation + grantNumber Funding Information Information about the Dataset's financial support none 46 : FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/sponsor + grantNumberAgency Agency The agency that provided financial support for the Dataset Organization XYZ text 47 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + grantNumberValue Identifier The grant identifier or contract identifier of the agency that provided financial support for the Dataset text 48 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE grantNumber citation + distributor Distributor The entity, such as a person or organization, designated to generate copies of the Dataset, including any editions or revisions none 49 FALSE FALSE TRUE FALSE FALSE FALSE citation + distributorName Name The name of the entity, e.g. the person's name or the name of an organization 1) FamilyName, GivenName or 2) Organization text 50 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE distributor citation + distributorAffiliation Affiliation The name of the entity affiliated with the distributor, e.g. an organization's name Organization XYZ text 51 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorAbbreviation Abbreviated Name The distributor's abbreviated name (e.g. IQSS, ICPSR) text 52 (#VALUE) FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorURL URL The URL of the distributor's webpage https:// url 53 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributorLogoURL Logo URL The URL of the distributor's logo image, used to show the image on the Dataset's page https:// url 54
FALSE FALSE FALSE FALSE FALSE FALSE distributor citation + distributionDate Distribution Date The date when the Dataset was made available for distribution/presentation YYYY-MM-DD date 55 TRUE FALSE FALSE TRUE FALSE FALSE citation + depositor Depositor The entity, such as a person or organization, that deposited the Dataset in the repository 1) FamilyName, GivenName or 2) Organization text 56 FALSE FALSE FALSE FALSE FALSE FALSE citation + dateOfDeposit Deposit Date The date when the Dataset was deposited into the repository YYYY-MM-DD date 57 FALSE FALSE FALSE TRUE FALSE FALSE citation http://purl.org/dc/terms/dateSubmitted + timePeriodCovered Time Period The time period that the data refer to. Also known as span. This is the time period covered by the data, not the dates of coding, collecting data, or making documents machine-readable none 58 ; FALSE FALSE TRUE FALSE FALSE FALSE citation https://schema.org/temporalCoverage + timePeriodCoveredStart Start Date The start date of the time period that the data refer to YYYY-MM-DD date 59 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + timePeriodCoveredEnd End Date The end date of the time period that the data refer to YYYY-MM-DD date 60 #NAME: #VALUE TRUE FALSE FALSE TRUE FALSE FALSE timePeriodCovered citation + dateOfCollection Date of Collection The dates when the data were collected or generated none 61 ; FALSE FALSE TRUE FALSE FALSE FALSE citation + dateOfCollectionStart Start Date The date when the data collection started YYYY-MM-DD date 62 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + dateOfCollectionEnd End Date The date when the data collection ended YYYY-MM-DD date 63 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE dateOfCollection citation + kindOfData Data Type The type of data included in the files (e.g. survey data, clinical data, or machine-readable text) text 64 TRUE FALSE TRUE TRUE FALSE FALSE citation http://rdf-vocabulary.ddialliance.org/discovery#kindOfData + series Series Information about the dataset series to which the Dataset belong none 65 : FALSE FALSE TRUE FALSE FALSE FALSE citation + seriesName Name The name of the dataset series text 66 #VALUE TRUE FALSE FALSE TRUE FALSE FALSE series citation + seriesInformation Information Can include 1) a history of the series and 2) a summary of features that apply to the series textbox 67 #VALUE FALSE FALSE FALSE FALSE FALSE FALSE series citation + software Software Information about the software used to generate the Dataset none 68 , FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasGeneratedBy + softwareName Name The name of software used to generate the Dataset text 69 #VALUE FALSE TRUE FALSE FALSE FALSE FALSE software citation + softwareVersion Version The version of the software used to generate the Dataset, e.g. 
4.11 text 70 #NAME: #VALUE FALSE FALSE FALSE FALSE FALSE FALSE software citation + relatedMaterial Related Material Information, such as a persistent ID or citation, about the material related to the Dataset, such as appendices or sampling information available outside of the Dataset textbox 71 FALSE FALSE TRUE FALSE FALSE FALSE citation + relatedDatasets Related Dataset Information, such as a persistent ID or citation, about a related dataset, such as previous research on the Dataset's subject textbox 72 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/relation + otherReferences Other Reference Information, such as a persistent ID or citation, about another type of resource that provides background or supporting material to the Dataset text 73 FALSE FALSE TRUE FALSE FALSE FALSE citation http://purl.org/dc/terms/references + dataSources Data Source Information, such as a persistent ID or citation, about sources of the Dataset (e.g. a book, article, serial, or machine-readable data file) textbox 74 FALSE FALSE TRUE FALSE FALSE FALSE citation https://www.w3.org/TR/prov-o/#wasDerivedFrom + originOfSources Origin of Historical Sources For historical sources, the origin and any rules followed in establishing them as sources textbox 75 FALSE FALSE FALSE FALSE FALSE FALSE citation + characteristicOfSources Characteristic of Sources Characteristics not already noted elsewhere textbox 76 FALSE FALSE FALSE FALSE FALSE FALSE citation + accessToSources Documentation and Access to Sources 1) Methods or procedures for accessing data sources and 2) any special permissions needed for access textbox 77 FALSE FALSE FALSE FALSE FALSE FALSE citation +#controlledVocabulary DatasetField Value identifier displayOrder + subject Agricultural Sciences D01 0 + subject Arts and Humanities D0 1 + subject Astronomy and Astrophysics D1 2 + subject Business and Management D2 3 + subject Chemistry D3 4 + subject Computer and Information Science D7 5 + subject Earth and Environmental Sciences D4 6 + subject Engineering D5 7 + subject Law D8 8 + subject Mathematical Sciences D9 9 + subject Medicine, Health and Life Sciences D6 10 + subject Physics D10 11 + subject Social Sciences D11 12 + subject Other D12 13 + publicationIDType ark 0 + publicationIDType arXiv 1 + publicationIDType bibcode 2 + publicationIDType cstr 3 + publicationIDType doi 4 + publicationIDType ean13 5 + publicationIDType eissn 6 + publicationIDType handle 7 + publicationIDType isbn 8 + publicationIDType issn 9 + publicationIDType istc 10 + publicationIDType lissn 11 + publicationIDType lsid 12 + publicationIDType pmid 13 + publicationIDType purl 14 + publicationIDType upc 15 + publicationIDType url 16 + publicationIDType urn 17 + publicationIDType DASH-NRS 18 + contributorType Data Collector 0 + contributorType Data Curator 1 + contributorType Data Manager 2 + contributorType Editor 3 + contributorType Funder 4 + contributorType Hosting Institution 5 + contributorType Project Leader 6 + contributorType Project Manager 7 + contributorType Project Member 8 + contributorType Related Person 9 + contributorType Researcher 10 + contributorType Research Group 11 + contributorType Rights Holder 12 + contributorType Sponsor 13 + contributorType Supervisor 14 + contributorType Work Package Leader 15 + contributorType Other 16 + authorIdentifierScheme ORCID 0 + authorIdentifierScheme ISNI 1 + authorIdentifierScheme LCNA 2 + authorIdentifierScheme VIAF 3 + authorIdentifierScheme GND 4 + authorIdentifierScheme DAI 5 + authorIdentifierScheme 
ResearcherID 6 + authorIdentifierScheme ScopusID 7 + language Abkhaz 0 + language Afar 1 aar aa + language Afrikaans 2 afr af + language Akan 3 aka ak + language Albanian 4 sqi alb sq + language Amharic 5 amh am + language Arabic 6 ara ar + language Aragonese 7 arg an + language Armenian 8 hye arm hy + language Assamese 9 asm as + language Avaric 10 ava av + language Avestan 11 ave ae + language Aymara 12 aym ay + language Azerbaijani 13 aze az + language Bambara 14 bam bm + language Bashkir 15 bak ba + language Basque 16 eus baq eu + language Belarusian 17 bel be + language Bengali, Bangla 18 ben bn + language Bihari 19 bih bh + language Bislama 20 bis bi + language Bosnian 21 bos bs + language Breton 22 bre br + language Bulgarian 23 bul bg + language Burmese 24 mya bur my + language Catalan,Valencian 25 cat ca + language Chamorro 26 cha ch + language Chechen 27 che ce + language Chichewa, Chewa, Nyanja 28 nya ny + language Chinese 29 zho chi zh + language Chuvash 30 chv cv + language Cornish 31 cor kw + language Corsican 32 cos co + language Cree 33 cre cr + language Croatian 34 hrv src hr + language Czech 35 ces cze cs + language Danish 36 dan da + language Divehi, Dhivehi, Maldivian 37 div dv + language Dutch 38 nld dut nl + language Dzongkha 39 dzo dz + language English 40 eng en + language Esperanto 41 epo eo + language Estonian 42 est et + language Ewe 43 ewe ee + language Faroese 44 fao fo + language Fijian 45 fij fj + language Finnish 46 fin fi + language French 47 fra fre fr + language Fula, Fulah, Pulaar, Pular 48 ful ff + language Galician 49 glg gl + language Georgian 50 kat geo ka + language German 51 deu ger de + language Greek (modern) 52 gre ell el + language Guaraní 53 grn gn + language Gujarati 54 guj gu + language Haitian, Haitian Creole 55 hat ht + language Hausa 56 hau ha + language Hebrew (modern) 57 heb he + language Herero 58 her hz + language Hindi 59 hin hi + language Hiri Motu 60 hmo ho + language Hungarian 61 hun hu + language Interlingua 62 ina ia + language Indonesian 63 ind id + language Interlingue 64 ile ie + language Irish 65 gle ga + language Igbo 66 ibo ig + language Inupiaq 67 ipk ik + language Ido 68 ido io + language Icelandic 69 isl ice is + language Italian 70 ita it + language Inuktitut 71 iku iu + language Japanese 72 jpn ja + language Javanese 73 jav jv + language Kalaallisut, Greenlandic 74 kal kl + language Kannada 75 kan kn + language Kanuri 76 kau kr + language Kashmiri 77 kas ks + language Kazakh 78 kaz kk + language Khmer 79 khm km + language Kikuyu, Gikuyu 80 kik ki + language Kinyarwanda 81 kin rw + language Kyrgyz 82 + language Komi 83 kom kv + language Kongo 84 kon kg + language Korean 85 kor ko + language Kurdish 86 kur ku + language Kwanyama, Kuanyama 87 kua kj + language Latin 88 lat la + language Luxembourgish, Letzeburgesch 89 ltz lb + language Ganda 90 lug lg + language Limburgish, Limburgan, Limburger 91 lim li + language Lingala 92 lin ln + language Lao 93 lao lo + language Lithuanian 94 lit lt + language Luba-Katanga 95 lub lu + language Latvian 96 lav lv + language Manx 97 glv gv + language Macedonian 98 mkd mac mk + language Malagasy 99 mlg mg + language Malay 100 may msa ms + language Malayalam 101 mal ml + language Maltese 102 mlt mt + language Māori 103 mao mri mi + language Marathi (Marāṭhī) 104 mar mr + language Marshallese 105 mah mh + language Mixtepec Mixtec 106 mix + language Mongolian 107 mon mn + language Nauru 108 nau na + language Navajo, Navaho 109 nav nv + language Northern Ndebele 110 nde nd + language 
Nepali 111 nep ne + language Ndonga 112 ndo ng + language Norwegian Bokmål 113 nob nb + language Norwegian Nynorsk 114 nno nn + language Norwegian 115 nor no + language Nuosu 116 + language Southern Ndebele 117 nbl nr + language Occitan 118 oci oc + language Ojibwe, Ojibwa 119 oji oj + language Old Church Slavonic,Church Slavonic,Old Bulgarian 120 chu cu + language Oromo 121 orm om + language Oriya 122 ori or + language Ossetian, Ossetic 123 oss os + language Panjabi, Punjabi 124 pan pa + language Pāli 125 pli pi + language Persian (Farsi) 126 per fas fa + language Polish 127 pol pl + language Pashto, Pushto 128 pus ps + language Portuguese 129 por pt + language Quechua 130 que qu + language Romansh 131 roh rm + language Kirundi 132 run rn + language Romanian 133 ron rum ro + language Russian 134 rus ru + language Sanskrit (Saṁskṛta) 135 san sa + language Sardinian 136 srd sc + language Sindhi 137 snd sd + language Northern Sami 138 sme se + language Samoan 139 smo sm + language Sango 140 sag sg + language Serbian 141 srp scc sr + language Scottish Gaelic, Gaelic 142 gla gd + language Shona 143 sna sn + language Sinhala, Sinhalese 144 sin si + language Slovak 145 slk slo sk + language Slovene 146 slv sl + language Somali 147 som so + language Southern Sotho 148 sot st + language Spanish, Castilian 149 spa es + language Sundanese 150 sun su + language Swahili 151 swa sw + language Swati 152 ssw ss + language Swedish 153 swe sv + language Tamil 154 tam ta + language Telugu 155 tel te + language Tajik 156 tgk tg + language Thai 157 tha th + language Tigrinya 158 tir ti + language Tibetan Standard, Tibetan, Central 159 tib bod bo + language Turkmen 160 tuk tk + language Tagalog 161 tgl tl + language Tswana 162 tsn tn + language Tonga (Tonga Islands) 163 ton to + language Turkish 164 tur tr + language Tsonga 165 tso ts + language Tatar 166 tat tt + language Twi 167 twi tw + language Tahitian 168 tah ty + language Uyghur, Uighur 169 uig ug + language Ukrainian 170 ukr uk + language Urdu 171 urd ur + language Uzbek 172 uzb uz + language Venda 173 ven ve + language Vietnamese 174 vie vi + language Volapük 175 vol vo + language Walloon 176 wln wa + language Welsh 177 cym wel cy + language Wolof 178 wol wo + language Western Frisian 179 fry fy + language Xhosa 180 xho xh + language Yiddish 181 yid yi + language Yoruba 182 yor yo + language Zhuang, Chuang 183 zha za + language Zulu 184 zul zu + language Not applicable 185 diff --git a/distros/dataverse.no/configs/domain.xml b/distros/dataverse.no/configs/domain.xml new file mode 100644 index 0000000..818f4e3 --- /dev/null +++ b/distros/dataverse.no/configs/domain.xml @@ -0,0 +1,649 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + log-notifier + + + + + + + + + + log-notifier + + + + log-notifier + + + log-notifier + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -server + [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED + [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED + [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED + [9|]--add-opens=java.base/java.lang=ALL-UNNAMED + [9|]--add-opens=java.base/java.net=ALL-UNNAMED + [9|]--add-opens=java.base/java.nio=ALL-UNNAMED + 
[9|]--add-opens=java.base/java.util=ALL-UNNAMED + [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED + [9|]--add-opens=java.management/sun.management=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED + [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED + [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED + [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED + -XX:NewRatio=2 + -XX:+UnlockDiagnosticVMOptions + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djava.awt.headless=true + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Djavax.management.builder.initial=com.sun.enterprise.v3.admin.AppServerMBeanServerBuilder + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Djavax.xml.accessExternalSchema=all + -Djdbc.drivers=org.h2.Driver + -Djdk.corba.allowOutputStreamSubclass=true + -Djdk.tls.rejectClientInitiatedRenegotiation=true + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.shell.remote,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=6666 + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--nointeractive + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=2 + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + -Dcom.ctc.wstx.returnNullForDefaultNamespace=true + -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager + -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true + -Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false + [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + [1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar + [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar + [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar + [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar + [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar + [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE + -XX:+UseContainerSupport + -XX:MaxRAMPercentage=${ENV=MEM_MAX_RAM_PERCENTAGE} + -Xss${ENV=MEM_XSS} + -Ddataverse.files.S3.type=s3 + -Ddataverse.files.S3.label=S3 + -Ddataverse.files.S3.bucket-name=2002-green-dataversenotest1 + -Ddataverse.files.S3.download-redirect=true + -Ddataverse.files.S3.url-expiration-minutes=120 + -Ddataverse.files.S3.connection-pool-size=4096 + -Ddataverse.files.storage-driver-id=S3 + -Ddataverse.files.S3.profile=cloudian + 
-Ddataverse.files.S3.custom-endpoint-url=https://s3-oslo.educloud.no + -Ddataverse.files.file.type=file + -Ddataverse.files.file.label=file + -Ddataverse.files.file.directory=/data + -Ddoi.username=BIBSYS.UIT-ORD + -Ddoi.password=${ALIAS=doi_password_alias} + -Ddoi.dataciterestapiurlstring=https://api.test.datacite.org + -Ddoi.baseurlstring=https://mds.test.datacite.org + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + log-notifier + + + + + + + log-notifier + + + + log-notifier + + + log-notifier + + + + -server + [9|]--add-opens=java.base/jdk.internal.loader=ALL-UNNAMED + [9|]--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED + [9|]--add-exports=java.base/jdk.internal.ref=ALL-UNNAMED + [9|]--add-opens=java.base/java.lang=ALL-UNNAMED + [9|]--add-opens=java.base/java.net=ALL-UNNAMED + [9|]--add-opens=java.base/java.nio=ALL-UNNAMED + [9|]--add-opens=java.base/java.util=ALL-UNNAMED + [9|]--add-opens=java.base/sun.nio.ch=ALL-UNNAMED + [9|]--add-opens=java.management/sun.management=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jrt=ALL-UNNAMED + [9|]--add-opens=java.base/sun.net.www.protocol.jar=ALL-UNNAMED + [9|]--add-opens=java.naming/javax.naming.spi=ALL-UNNAMED + [9|]--add-opens=java.rmi/sun.rmi.transport=ALL-UNNAMED + [9|]--add-opens=java.logging/java.util.logging=ALL-UNNAMED + -Xmx512m + -XX:NewRatio=2 + -XX:+UnlockDiagnosticVMOptions + -Dcom.sun.enterprise.config.config_environment_factory_class=com.sun.enterprise.config.serverbeans.AppserverConfigEnvironmentFactory + -Djava.awt.headless=true + -Djava.security.auth.login.config=${com.sun.aas.instanceRoot}/config/login.conf + -Djava.security.policy=${com.sun.aas.instanceRoot}/config/server.policy + -Dcom.sun.enterprise.security.httpsOutboundKeyAlias=s1as + -Djavax.net.ssl.keyStore=${com.sun.aas.instanceRoot}/config/keystore.jks + -Djavax.net.ssl.trustStore=${com.sun.aas.instanceRoot}/config/cacerts.jks + -Djdbc.drivers=org.h2.Driver + -Djdk.corba.allowOutputStreamSubclass=true + -Djdk.tls.rejectClientInitiatedRenegotiation=true + -DANTLR_USE_DIRECT_CLASS_LOADING=true + -Dorg.glassfish.additionalOSGiBundlesToStart=org.apache.felix.shell,org.apache.felix.gogo.runtime,org.apache.felix.gogo.shell,org.apache.felix.gogo.command,org.apache.felix.fileinstall + -Dosgi.shell.telnet.port=${OSGI_SHELL_TELNET_PORT} + -Dosgi.shell.telnet.maxconn=1 + -Dosgi.shell.telnet.ip=127.0.0.1 + -Dgosh.args=--nointeractive + -Dfelix.fileinstall.dir=${com.sun.aas.installRoot}/modules/autostart/ + -Dfelix.fileinstall.poll=5000 + -Dfelix.fileinstall.log.level=3 + -Dfelix.fileinstall.bundles.new.start=true + -Dfelix.fileinstall.bundles.startTransient=true + -Dfelix.fileinstall.disableConfigSave=false + -Dorg.glassfish.grizzly.DEFAULT_MEMORY_MANAGER=org.glassfish.grizzly.memory.HeapMemoryManager + -Dorg.glassfish.grizzly.nio.DefaultSelectorHandler.force-selector-spin-detection=true + -Dorg.jboss.weld.serialization.beanIdentifierIndexOptimization=false + [|8]-Djava.endorsed.dirs=${com.sun.aas.installRoot}/modules/endorsed${path.separator}${com.sun.aas.installRoot}/lib/endorsed + [|8]-Djava.ext.dirs=${com.sun.aas.javaRoot}/lib/ext${path.separator}${com.sun.aas.javaRoot}/jre/lib/ext${path.separator}${com.sun.aas.instanceRoot}/lib/ext + 
[1.8.0|1.8.0u120]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.6.jar + [1.8.0u121|1.8.0u160]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.7.jar + [1.8.0u161|1.8.0u190]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.jar + [1.8.0u191|1.8.0u250]-Xbootclasspath/p:${com.sun.aas.installRoot}/lib/grizzly-npn-bootstrap-1.8.1.jar + [1.8.0u251|]-Xbootclasspath/a:${com.sun.aas.installRoot}/lib/grizzly-npn-api.jar + [Azul-1.8.0u222|1.8.0u260]-XX:+UseOpenJSSE + [remainder of domain.xml: the surrounding XML element markup did not survive extraction] + \ No newline at end of file
diff --git a/distros/dataverse.no/configs/htdocs-ssl/index.html b/distros/dataverse.no/configs/htdocs-ssl/index.html new file mode 100755 index 0000000..6b233f9 --- /dev/null +++ b/distros/dataverse.no/configs/htdocs-ssl/index.html @@ -0,0 +1 @@ +

It works with SSL!

diff --git a/distros/dataverse.no/configs/htdocs/index.html b/distros/dataverse.no/configs/htdocs/index.html new file mode 100755 index 0000000..f5f1c37 --- /dev/null +++ b/distros/dataverse.no/configs/htdocs/index.html @@ -0,0 +1 @@ +

It works!
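The htdocs and htdocs-ssl directories added above contain nothing but these one-line test pages, so a single request against the web container is enough to tell whether Apache and TLS termination are up before Shibboleth and the AJP proxying (configured in http-dataverse.conf below) are layered on top. A minimal sketch of such a check follows; the hostname is the ServerName used in the configs later in this diff, while the published HTTPS port and the self-signed localhost certificate are assumptions about the running deployment, not something this repository guarantees.

# smoke_test.py - rough sketch: confirm the Apache/Shibboleth container answers over TLS.
# Assumes HTTPS is published on port 443 of the host named below and that the
# certificate is the self-signed localhost.crt, so verification is disabled.
import ssl
import urllib.request

HOST = "test-docker.dataverse.no"  # ServerName from http-dataverse.conf; adjust for your deployment

ctx = ssl.create_default_context()
ctx.check_hostname = False  # self-signed certificate
ctx.verify_mode = ssl.CERT_NONE

with urllib.request.urlopen(f"https://{HOST}/", context=ctx, timeout=10) as resp:
    body = resp.read().decode("utf-8", errors="replace")
    # Before the AJP proxy is wired up this should be the test page;
    # afterwards "/" is forwarded to the dataverse container instead.
    print(resp.status, "It works" in body)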

diff --git a/distros/dataverse.no/configs/http-dataverse.conf b/distros/dataverse.no/configs/http-dataverse.conf new file mode 100644 index 0000000..dae9b15 --- /dev/null +++ b/distros/dataverse.no/configs/http-dataverse.conf @@ -0,0 +1,213 @@ + + + + ServerName test-docker.dataverse.no + DocumentRoot /var/www/html +# ErrorLog /var/logs/http-error_log +# CustomLog /var/logs/http-access_log combined env=!monitor + +#Header always set X-Frame-Options "SAMEORIGIN" +#Header always set X-XSS-Protection "1; mode=block" +#Header always set X-Content-Type-Options "nosniff" + + + Options None + Require all granted + + +RewriteEngine On +RewriteCond %{HTTPS} !=on +RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L] + + + + + ServerName test-docker.dataverse.no + + Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" + + Header always set X-Frame-Options "SAMEORIGIN" + Header always set X-XSS-Protection "1; mode=block" + Header always set X-Content-Type-Options "nosniff" + Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +ErrorLog /dev/stdout +TransferLog /dev/stdout + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. + SSLEngine on + + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol +TLSv1.3 +TLSv1.2 +SSLCipherSuite ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-RSA-AES128-SHA256 +SSLHonorCipherOrder on +SSLCompression off +SSLSessionTickets off + + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +# vty +SSLCertificateFile /etc/ssl/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) +# #vty +SSLCertificateKeyFile /etc/ssl/private/localhost.key + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require valid-user + + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". 
+# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + + + +SSLProxyVerify none +SSLProxyCheckPeerCN off +SSLProxyCheckPeerName off +SSLProxyCheckPeerExpire off + +ProxyPassInterpolateEnv On +ProxyPassMatch ^/Shibboleth.sso ! +ProxyPassMatch ^/shibboleth-ds ! +ProxyPass "/" "ajp://dataverse:8009/" timeout=600 +ProxyPassReverse "/" "ajp://dataverse:8009/" timeout=600 +ProxyPassReverseCookieDomain "dataverse" "test-docker.dataverse.no" +ProxyPassReverseCookiePath "/" "/" + + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. +# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. 
Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + + + + ProxyPass ! + +Alias /robots.txt /var/www/robots.txt + diff --git a/distros/dataverse.no/configs/http-ssl-test.conf b/distros/dataverse.no/configs/http-ssl-test.conf new file mode 100755 index 0000000..d33c651 --- /dev/null +++ b/distros/dataverse.no/configs/http-ssl-test.conf @@ -0,0 +1,322 @@ +# +# When we also provide SSL we have to listen to the +# the HTTPS port in addition. +# +Listen 9443 https + +## +## SSL Global Context +## +## All SSL configuration in this context applies both to +## the main server and all SSL-enabled virtual hosts. +## + +# Pass Phrase Dialog: +# Configure the pass phrase gathering process. +# The filtering dialog program (`builtin' is a internal +# terminal dialog) has to provide the pass phrase on stdout. +SSLPassPhraseDialog exec:/usr/libexec/httpd-ssl-pass-dialog + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. 
+# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +## +## SSL Virtual Host Context +## +# + + ServerName test.dataverse.no + DocumentRoot /var/www/html + #ErrorLog /var/logs/http-error_log + #CustomLog /var/logs/http-access_log combined env=!monitor + + Header always set X-Frame-Options "SAMEORIGIN" + Header always set X-XSS-Protection "1; mode=block" + Header always set X-Content-Type-Options "nosniff" + + + Options None + Require all granted + + + RewriteEngine On + RewriteCond %{HTTPS} !=on + RewriteRule ^/?(.*) https://%{SERVER_NAME}/$1 [R,L] + + + + + +# General setup for the virtual host, inherited from global configuration +#DocumentRoot "/var/www/html" +#ServerName www.example.com:443 +ServerName test.dataverse.no +Header always set Strict-Transport-Security "max-age=31536000; includeSubdomains" +# Content-Security-Policy: noen java-filer laster fra http, så denne kan +# ikke brukes. +#Header always set Content-Security-Policy "default-src https:" +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +#:443 + +# Use separate log files for the SSL virtual host; note that LogLevel +# is not inherited from httpd.conf. +ErrorLog /dev/stdout +TransferLog /dev/stdout +LoadModule dumpio_module modules/mod_dumpio.so + + DumpIOInput On + DumpIOOutput On + +LogLevel dumpio:trace7 + +Header always set X-Frame-Options "SAMEORIGIN" +Header always set X-XSS-Protection "1; mode=block" +Header always set X-Content-Type-Options "nosniff" +Header edit Set-Cookie ^(.*)$ "$1; Secure; SameSite=None" + +# SSL Engine Switch: +# Enable/Disable SSL for this virtual host. +SSLEngine on + +# SSL Protocol support: +# List the enable protocol levels with which clients will be able to +# connect. Disable SSLv2 access by default: +SSLProtocol all -SSLv2 -SSLv3 + +# SSL Cipher Suite: +# List the ciphers that the client is permitted to negotiate. +# See the mod_ssl documentation for a complete list. +SSLCipherSuite HIGH:3DES:!aNULL:!MD5:!SEED:!IDEA + +# Speed-optimized SSL Cipher configuration: +# If speed is your main concern (on busy HTTPS servers e.g.), +# you might want to force clients to specific, performance +# optimized ciphers. In this case, prepend those ciphers +# to the SSLCipherSuite list, and enable SSLHonorCipherOrder. +# Caveat: by giving precedence to RC4-SHA and AES128-SHA +# (as in the example below), most connections will no longer +# have perfect forward secrecy - if the server's key is +# compromised, captures of past or future traffic must be +# considered compromised, too. +#SSLCipherSuite RC4-SHA:AES128-SHA:HIGH:MEDIUM:!aNULL:!MD5 +#SSLHonorCipherOrder on + +# Server Certificate: +# Point SSLCertificateFile at a PEM encoded certificate. If +# the certificate is encrypted, then you will be prompted for a +# pass phrase. Note that a kill -HUP will prompt again. A new +# certificate can be generated using the genkey(1) command. +# vty +SSLCertificateFile /etc/pki/tls/certs/localhost.crt + +# Server Private Key: +# If the key is not combined with the certificate, use this +# directive to point at the key file. Keep in mind that if +# you've both a RSA and a DSA private key you can configure +# both in parallel (to also allow the use of DSA ciphers, etc.) 
+# #vty +SSLCertificateKeyFile /etc/pki/tls/private/localhost.key + +# Server Certificate Chain: +# Point SSLCertificateChainFile at a file containing the +# concatenation of PEM encoded CA certificates which form the +# certificate chain for the server certificate. Alternatively +# the referenced file can be the same as SSLCertificateFile +# when the CA certificates are directly appended to the server +# certificate for convinience. +#SSLCertificateChainFile /etc/pki/tls/certs/server-chain.crt + +# Certificate Authority (CA): +# Set the CA certificate verification path where to find CA +# certificates for client authentication or alternatively one +# huge file containing all of them (file must be PEM encoded) +#SSLCACertificateFile /etc/pki/tls/certs/ca-bundle.crt + +# Client Authentication (Type): +# Client certificate verification type and depth. Types are +# none, optional, require and optional_no_ca. Depth is a +# number which specifies how deeply to verify the certificate +# issuer chain before deciding the certificate is not valid. +#SSLVerifyClient require +#SSLVerifyDepth 10 + +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + # vty + ShibRequestSetting requireSession 1 + require shib-session + + + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require valid-user + + + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + +ProxyPassInterpolateEnv On +ProxyPassMatch ^/Shibboleth.sso ! +ProxyPassMatch ^/shibboleth-ds ! +ProxyPass / ajp://dataverse:8009/ interpolate +ProxyPassReverse / ajp://dataverse:8009/ interpolate +ProxyPassReverseCookieDomain "dataverse" "test.dataverse.no" interpolate +ProxyPassReverseCookiePath "/" "/" interpolate + +# Access Control: +# With SSLRequire you can do per-directory access control based +# on arbitrary complex boolean expressions containing server +# variable checks and other lookup directives. The syntax is a +# mixture between C and Perl. See the mod_ssl documentation +# for more details. +# +#SSLRequire ( %{SSL_CIPHER} !~ m/^(EXP|NULL)/ \ +# and %{SSL_CLIENT_S_DN_O} eq "Snake Oil, Ltd." \ +# and %{SSL_CLIENT_S_DN_OU} in {"Staff", "CA", "Dev"} \ +# and %{TIME_WDAY} >= 1 and %{TIME_WDAY} <= 5 \ +# and %{TIME_HOUR} >= 8 and %{TIME_HOUR} <= 20 ) \ +# or %{REMOTE_ADDR} =~ m/^192\.76\.162\.[0-9]+$/ +# + +# SSL Engine Options: +# Set various options for the SSL engine. +# o FakeBasicAuth: +# Translate the client X.509 into a Basic Authorisation. This means that +# the standard Auth/DBMAuth methods can be used for access control. The +# user name is the `one line' version of the client's X.509 certificate. +# Note that no password is obtained from the user. Every entry in the user +# file needs this password: `xxj31ZMTZzkVA'. 
+# o ExportCertData: +# This exports two additional environment variables: SSL_CLIENT_CERT and +# SSL_SERVER_CERT. These contain the PEM-encoded certificates of the +# server (always existing) and the client (only existing when client +# authentication is used). This can be used to import the certificates +# into CGI scripts. +# o StdEnvVars: +# This exports the standard SSL/TLS related `SSL_*' environment variables. +# Per default this exportation is switched off for performance reasons, +# because the extraction step is an expensive operation and is usually +# useless for serving static content. So one usually enables the +# exportation for CGI and SSI requests only. +# o StrictRequire: +# This denies access when "SSLRequireSSL" or "SSLRequire" applied even +# under a "Satisfy any" situation, i.e. when it applies access is denied +# and no other module can change it. +# o OptRenegotiate: +# This enables optimized SSL connection renegotiation handling when SSL +# directives are used in per-directory context. +#SSLOptions +FakeBasicAuth +ExportCertData +StrictRequire + + SSLOptions +StdEnvVars + + + SSLOptions +StdEnvVars + + +# SSL Protocol Adjustments: +# The safe and default but still SSL/TLS standard compliant shutdown +# approach is that mod_ssl sends the close notify alert but doesn't wait for +# the close notify alert from client. When you need a different shutdown +# approach you can use one of the following variables: +# o ssl-unclean-shutdown: +# This forces an unclean shutdown when the connection is closed, i.e. no +# SSL close notify alert is send or allowed to received. This violates +# the SSL/TLS standard but is needed for some brain-dead browsers. Use +# this when you receive I/O errors because of the standard approach where +# mod_ssl sends the close notify alert. +# o ssl-accurate-shutdown: +# This forces an accurate shutdown when the connection is closed, i.e. a +# SSL close notify alert is send and mod_ssl waits for the close notify +# alert of the client. This is 100% SSL/TLS standard compliant, but in +# practice often causes hanging connections with brain-dead browsers. Use +# this only for browsers where you know that their SSL implementation +# works correctly. +# Notice: Most problems of broken clients are also related to the HTTP +# keep-alive facility, so you usually additionally want to disable +# keep-alive for those clients, too. Use variable "nokeepalive" for this. +# Similarly, one has to force some clients to use HTTP/1.0 to workaround +# their broken HTTP/1.1 implementation. Use variables "downgrade-1.0" and +# "force-response-1.0" for this. +BrowserMatch "MSIE [2-5]" \ + nokeepalive ssl-unclean-shutdown \ + downgrade-1.0 force-response-1.0 + +# Per-Server Logging: +# The home of a custom SSL log file. Use this when you want a +# compact non-error SSL logfile on a virtual host basis. +#CustomLog /dev/stdout \ +# "%t %h %{SSL_PROTOCOL}x %{SSL_CIPHER}x \"%r\" %b" +Customlog /var/log/httpd/access.log combined +ErrorLog /var/log/httpd/error.log + +ErrorLogFormat "httpd-ssl-error [%{u}t] [%-m:%l] [pid %P:tid %T] %7F: %E: [client\ %a] %M% ,\ referer\ %{Referer}i" + + diff --git a/distros/dataverse.no/configs/http-ssl.conf b/distros/dataverse.no/configs/http-ssl.conf new file mode 100644 index 0000000..19a7ca9 --- /dev/null +++ b/distros/dataverse.no/configs/http-ssl.conf @@ -0,0 +1,589 @@ +# +# This is the main Apache HTTP server configuration file. It contains the +# configuration directives that give the server its instructions. 
+# See for detailed information. +# In particular, see +# +# for a discussion of each configuration directive. +# +# Do NOT simply read the instructions in here without understanding +# what they do. They're here only as hints or reminders. If you are unsure +# consult the online docs. You have been warned. +# +# Configuration and logfile names: If the filenames you specify for many +# of the server's control files begin with "/" (or "drive:/" for Win32), the +# server will use that explicit path. If the filenames do *not* begin +# with "/", the value of ServerRoot is prepended -- so "logs/access_log" +# with ServerRoot set to "/usr/local/apache2" will be interpreted by the +# server as "/usr/local/apache2/logs/access_log", whereas "/logs/access_log" +# will be interpreted as '/logs/access_log'. + +# +# ServerRoot: The top of the directory tree under which the server's +# configuration, error, and log files are kept. +# +# Do not add a slash at the end of the directory path. If you point +# ServerRoot at a non-local disk, be sure to specify a local disk on the +# Mutex directive, if file-based mutexes are used. If you wish to share the +# same ServerRoot for multiple httpd daemons, you will need to change at +# least PidFile. +# +ServerRoot "/usr/local/apache2" + +# +# Mutex: Allows you to set the mutex mechanism and mutex file directory +# for individual mutexes, or change the global defaults +# +# Uncomment and change the directory if mutexes are file-based and the default +# mutex file directory is not on a local disk or is not appropriate for some +# other reason. +# +# Mutex default:logs + +# +# Listen: Allows you to bind Apache to specific IP addresses and/or +# ports, instead of the default. See also the +# directive. +# +# Change this to Listen on specific IP addresses as shown below to +# prevent Apache from glomming onto all bound IP addresses. +# +#Listen 12.34.56.78:80 +Listen 80 +Listen 443 +TimeOut 600 +LimitRequestBody 0 + +# +# Dynamic Shared Object (DSO) Support +# +# To be able to use the functionality of a module which was built as a DSO you +# have to place corresponding `LoadModule' lines at this location so the +# directives contained in it are actually available _before_ they are used. +# Statically compiled modules (those listed by `httpd -l') do not need +# to be loaded here. 
+# +# Example: +# LoadModule foo_module modules/mod_foo.so +# +LoadModule mpm_event_module modules/mod_mpm_event.so +#LoadModule mpm_prefork_module modules/mod_mpm_prefork.so +#LoadModule mpm_worker_module modules/mod_mpm_worker.so +LoadModule authn_file_module modules/mod_authn_file.so +#LoadModule authn_dbm_module modules/mod_authn_dbm.so +#LoadModule authn_anon_module modules/mod_authn_anon.so +#LoadModule authn_dbd_module modules/mod_authn_dbd.so +#LoadModule authn_socache_module modules/mod_authn_socache.so +LoadModule authn_core_module modules/mod_authn_core.so +LoadModule authz_host_module modules/mod_authz_host.so +LoadModule authz_groupfile_module modules/mod_authz_groupfile.so +LoadModule authz_user_module modules/mod_authz_user.so +#LoadModule authz_dbm_module modules/mod_authz_dbm.so +#LoadModule authz_owner_module modules/mod_authz_owner.so +#LoadModule authz_dbd_module modules/mod_authz_dbd.so +LoadModule authz_core_module modules/mod_authz_core.so +#LoadModule authnz_ldap_module modules/mod_authnz_ldap.so +#LoadModule authnz_fcgi_module modules/mod_authnz_fcgi.so +LoadModule access_compat_module modules/mod_access_compat.so +LoadModule auth_basic_module modules/mod_auth_basic.so +#LoadModule auth_form_module modules/mod_auth_form.so +#LoadModule auth_digest_module modules/mod_auth_digest.so +#LoadModule allowmethods_module modules/mod_allowmethods.so +#LoadModule isapi_module modules/mod_isapi.so +#LoadModule file_cache_module modules/mod_file_cache.so +#LoadModule cache_module modules/mod_cache.so +#LoadModule cache_disk_module modules/mod_cache_disk.so +#LoadModule cache_socache_module modules/mod_cache_socache.so +LoadModule socache_shmcb_module modules/mod_socache_shmcb.so +#LoadModule socache_dbm_module modules/mod_socache_dbm.so +#LoadModule socache_memcache_module modules/mod_socache_memcache.so +#LoadModule socache_redis_module modules/mod_socache_redis.so +#LoadModule watchdog_module modules/mod_watchdog.so +#LoadModule macro_module modules/mod_macro.so +#LoadModule dbd_module modules/mod_dbd.so +#LoadModule bucketeer_module modules/mod_bucketeer.so +#LoadModule dumpio_module modules/mod_dumpio.so +#LoadModule echo_module modules/mod_echo.so +#LoadModule example_hooks_module modules/mod_example_hooks.so +#LoadModule case_filter_module modules/mod_case_filter.so +#LoadModule case_filter_in_module modules/mod_case_filter_in.so +#LoadModule example_ipc_module modules/mod_example_ipc.so +#LoadModule buffer_module modules/mod_buffer.so +#LoadModule data_module modules/mod_data.so +#LoadModule ratelimit_module modules/mod_ratelimit.so +LoadModule reqtimeout_module modules/mod_reqtimeout.so +#LoadModule ext_filter_module modules/mod_ext_filter.so +#LoadModule request_module modules/mod_request.so +#LoadModule include_module modules/mod_include.so +LoadModule filter_module modules/mod_filter.so +#LoadModule reflector_module modules/mod_reflector.so +#LoadModule substitute_module modules/mod_substitute.so +#LoadModule sed_module modules/mod_sed.so +#LoadModule charset_lite_module modules/mod_charset_lite.so +#LoadModule deflate_module modules/mod_deflate.so +#LoadModule xml2enc_module modules/mod_xml2enc.so +#LoadModule proxy_html_module modules/mod_proxy_html.so +#LoadModule brotli_module modules/mod_brotli.so +LoadModule mime_module modules/mod_mime.so +#LoadModule ldap_module modules/mod_ldap.so +LoadModule log_config_module modules/mod_log_config.so +#LoadModule log_debug_module modules/mod_log_debug.so +#LoadModule log_forensic_module modules/mod_log_forensic.so 
+#LoadModule logio_module modules/mod_logio.so +#LoadModule lua_module modules/mod_lua.so +LoadModule env_module modules/mod_env.so +#LoadModule mime_magic_module modules/mod_mime_magic.so +#LoadModule cern_meta_module modules/mod_cern_meta.so +#LoadModule expires_module modules/mod_expires.so +LoadModule headers_module modules/mod_headers.so +#LoadModule ident_module modules/mod_ident.so +#LoadModule usertrack_module modules/mod_usertrack.so +#LoadModule unique_id_module modules/mod_unique_id.so +LoadModule setenvif_module modules/mod_setenvif.so +LoadModule version_module modules/mod_version.so +#LoadModule remoteip_module modules/mod_remoteip.so +LoadModule proxy_module modules/mod_proxy.so +#LoadModule proxy_connect_module modules/mod_proxy_connect.so +#LoadModule proxy_ftp_module modules/mod_proxy_ftp.so +#LoadModule proxy_http_module modules/mod_proxy_http.so +#LoadModule proxy_fcgi_module modules/mod_proxy_fcgi.so +#LoadModule proxy_scgi_module modules/mod_proxy_scgi.so +#LoadModule proxy_uwsgi_module modules/mod_proxy_uwsgi.so +#LoadModule proxy_fdpass_module modules/mod_proxy_fdpass.so +#LoadModule proxy_wstunnel_module modules/mod_proxy_wstunnel.so +LoadModule proxy_ajp_module modules/mod_proxy_ajp.so +#LoadModule proxy_balancer_module modules/mod_proxy_balancer.so +#LoadModule proxy_express_module modules/mod_proxy_express.so +#LoadModule proxy_hcheck_module modules/mod_proxy_hcheck.so +#LoadModule session_module modules/mod_session.so +#LoadModule session_cookie_module modules/mod_session_cookie.so +#LoadModule session_crypto_module modules/mod_session_crypto.so +#LoadModule session_dbd_module modules/mod_session_dbd.so +#LoadModule slotmem_shm_module modules/mod_slotmem_shm.so +#LoadModule slotmem_plain_module modules/mod_slotmem_plain.so +LoadModule ssl_module modules/mod_ssl.so +#LoadModule optional_hook_export_module modules/mod_optional_hook_export.so +#LoadModule optional_hook_import_module modules/mod_optional_hook_import.so +#LoadModule optional_fn_import_module modules/mod_optional_fn_import.so +#LoadModule optional_fn_export_module modules/mod_optional_fn_export.so +#LoadModule dialup_module modules/mod_dialup.so +LoadModule http2_module modules/mod_http2.so +#LoadModule proxy_http2_module modules/mod_proxy_http2.so +#LoadModule md_module modules/mod_md.so +#LoadModule lbmethod_byrequests_module modules/mod_lbmethod_byrequests.so +#LoadModule lbmethod_bytraffic_module modules/mod_lbmethod_bytraffic.so +#LoadModule lbmethod_bybusyness_module modules/mod_lbmethod_bybusyness.so +#LoadModule lbmethod_heartbeat_module modules/mod_lbmethod_heartbeat.so +LoadModule unixd_module modules/mod_unixd.so +#LoadModule heartbeat_module modules/mod_heartbeat.so +#LoadModule heartmonitor_module modules/mod_heartmonitor.so +#LoadModule dav_module modules/mod_dav.so +LoadModule status_module modules/mod_status.so +LoadModule autoindex_module modules/mod_autoindex.so +#LoadModule asis_module modules/mod_asis.so +#LoadModule info_module modules/mod_info.so +#LoadModule suexec_module modules/mod_suexec.so + + #LoadModule cgid_module modules/mod_cgid.so + + + #LoadModule cgi_module modules/mod_cgi.so + +#LoadModule dav_fs_module modules/mod_dav_fs.so +#LoadModule dav_lock_module modules/mod_dav_lock.so +#LoadModule vhost_alias_module modules/mod_vhost_alias.so +#LoadModule negotiation_module modules/mod_negotiation.so +LoadModule dir_module modules/mod_dir.so +#LoadModule imagemap_module modules/mod_imagemap.so +#LoadModule actions_module modules/mod_actions.so +#LoadModule speling_module 
modules/mod_speling.so +#LoadModule userdir_module modules/mod_userdir.so +LoadModule alias_module modules/mod_alias.so +LoadModule rewrite_module modules/mod_rewrite.so +LoadModule mod_shib /usr/lib/apache2/modules/mod_shib.so + + +# +# If you wish httpd to run as a different user or group, you must run +# httpd as root initially and it will switch. +# +# User/Group: The name (or #number) of the user/group to run httpd as. +# It is usually good practice to create a dedicated user and group for +# running httpd, as with most system services. +# +User www-data +Group www-data + + + +# 'Main' server configuration +# +# The directives in this section set up the values used by the 'main' +# server, which responds to any requests that aren't handled by a +# definition. These values also provide defaults for +# any containers you may define later in the file. +# +# All of these directives may appear inside containers, +# in which case these default settings will be overridden for the +# virtual host being defined. +# + +# +# ServerAdmin: Your address, where problems with the server should be +# e-mailed. This address appears on some server-generated pages, such +# as error documents. e.g. admin@your-domain.com +# +ServerAdmin support@dataverse.no + +# +# ServerName gives the name and port that the server uses to identify itself. +# This can often be determined automatically, but we recommend you specify +# it explicitly to prevent problems during startup. +# +# If your host doesn't have a registered DNS name, enter its IP address here. +# +ServerName test-docker.dataverse.no + +# +# Deny access to the entirety of your server's filesystem. You must +# explicitly permit access to web content directories in other +# blocks below. +# + + AllowOverride none + Require all denied + + +# +# Note that from this point forward you must specifically allow +# particular features to be enabled - so if something's not working as +# you might expect, make sure that you have specifically enabled it +# below. +# + +# +# DocumentRoot: The directory out of which you will serve your +# documents. By default, all requests are taken from this directory, but +# symbolic links and aliases may be used to point to other locations. +# +DocumentRoot "/usr/local/apache2/htdocs" + + # + # Possible values for the Options directive are "None", "All", + # or any combination of: + # Indexes Includes FollowSymLinks SymLinksifOwnerMatch ExecCGI MultiViews + # + # Note that "MultiViews" must be named *explicitly* --- "Options All" + # doesn't give it to you. + # + # The Options directive is both complicated and important. Please see + # http://httpd.apache.org/docs/2.4/mod/core.html#options + # for more information. + # + Options Indexes FollowSymLinks + + # + # AllowOverride controls what directives may be placed in .htaccess files. + # It can be "All", "None", or any combination of the keywords: + # AllowOverride FileInfo AuthConfig Limit + # + AllowOverride None + + # + # Controls who can get stuff from this server. + # + Require all granted + + +# +# DirectoryIndex: sets the file that Apache will serve if a directory +# is requested. +# + + DirectoryIndex index.html + + +# +# The following lines prevent .htaccess and .htpasswd files from being +# viewed by Web clients. +# + + Require all denied + + +# +# ErrorLog: The location of the error log file. +# If you do not specify an ErrorLog directive within a +# container, error messages relating to that virtual host will be +# logged here. 
If you *do* define an error logfile for a +# container, that host's errors will be logged there and not here. +# +ErrorLog /proc/self/fd/2 + +# +# LogLevel: Control the number of messages logged to the error_log. +# Possible values include: debug, info, notice, warn, error, crit, +# alert, emerg. +# +LogLevel warn + + + # + # The following directives define some format nicknames for use with + # a CustomLog directive (see below). + # + LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined + LogFormat "%h %l %u %t \"%r\" %>s %b" common + + + # You need to enable mod_logio.c to use %I and %O + LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\" %I %O" combinedio + + + # + # The location and format of the access logfile (Common Logfile Format). + # If you do not define any access logfiles within a + # container, they will be logged here. Contrariwise, if you *do* + # define per- access logfiles, transactions will be + # logged therein and *not* in this file. + # + CustomLog /proc/self/fd/1 common + + # + # If you prefer a logfile with access, agent, and referer information + # (Combined Logfile Format) you can use the following directive. + # + #CustomLog "logs/access_log" combined + + + + # + # Redirect: Allows you to tell clients about documents that used to + # exist in your server's namespace, but do not anymore. The client + # will make a new request for the document at its new location. + # Example: + # Redirect permanent /foo http://www.example.com/bar + + # + # Alias: Maps web paths into filesystem paths and is used to + # access content that does not live under the DocumentRoot. + # Example: + # Alias /webpath /full/filesystem/path + # + # If you include a trailing / on /webpath then the server will + # require it to be present in the URL. You will also likely + # need to provide a section to allow access to + # the filesystem path. + + # + # ScriptAlias: This controls which directories contain server scripts. + # ScriptAliases are essentially the same as Aliases, except that + # documents in the target directory are treated as applications and + # run by the server when requested rather than as documents sent to the + # client. The same rules about trailing "/" apply to ScriptAlias + # directives as to Alias. + # + ScriptAlias /cgi-bin/ "/usr/local/apache2/cgi-bin/" + + + + + # + # ScriptSock: On threaded servers, designate the path to the UNIX + # socket used to communicate with the CGI daemon of mod_cgid. + # + #Scriptsock cgisock + + +# +# "/usr/local/apache2/cgi-bin" should be changed to whatever your ScriptAliased +# CGI directory exists, if you have that configured. +# + + AllowOverride None + Options None + Require all granted + + + + # + # Avoid passing HTTP_PROXY environment to CGI's on this or any proxied + # backend servers which have lingering "httpoxy" defects. + # 'Proxy' request header is undefined by the IETF, not listed by IANA + # + RequestHeader unset Proxy early + + + + # + # TypesConfig points to the file containing the list of mappings from + # filename extension to MIME-type. + # + TypesConfig conf/mime.types + + # + # AddType allows you to add to or override the MIME configuration + # file specified in TypesConfig for specific file types. + # + #AddType application/x-gzip .tgz + # + # AddEncoding allows you to have certain browsers uncompress + # information on the fly. Note: Not all browsers support this. 
+ # + #AddEncoding x-compress .Z + #AddEncoding x-gzip .gz .tgz + # + # If the AddEncoding directives above are commented-out, then you + # probably should define those extensions to indicate media types: + # + AddType application/x-compress .Z + AddType application/x-gzip .gz .tgz + + # + # AddHandler allows you to map certain file extensions to "handlers": + # actions unrelated to filetype. These can be either built into the server + # or added with the Action directive (see below) + # + # To use CGI scripts outside of ScriptAliased directories: + # (You will also need to add "ExecCGI" to the "Options" directive.) + # + #AddHandler cgi-script .cgi + + # For type maps (negotiated resources): + #AddHandler type-map var + + # + # Filters allow you to process content before it is sent to the client. + # + # To parse .shtml files for server-side includes (SSI): + # (You will also need to add "Includes" to the "Options" directive.) + # + #AddType text/html .shtml + #AddOutputFilter INCLUDES .shtml + + +# +# The mod_mime_magic module allows the server to use various hints from the +# contents of the file itself to determine its type. The MIMEMagicFile +# directive tells the module where the hint definitions are located. +# +#MIMEMagicFile conf/magic + +# +# Customizable error responses come in three flavors: +# 1) plain text 2) local redirects 3) external redirects +# +# Some examples: +#ErrorDocument 500 "The server made a boo boo." +#ErrorDocument 404 /missing.html +#ErrorDocument 404 "/cgi-bin/missing_handler.pl" +#ErrorDocument 402 http://www.example.com/subscription_info.html +# + +# +# MaxRanges: Maximum number of Ranges in a request before +# returning the entire resource, or one of the special +# values 'default', 'none' or 'unlimited'. +# Default setting is to accept 200 Ranges. +#MaxRanges unlimited + +# +# EnableMMAP and EnableSendfile: On systems that support it, +# memory-mapping or the sendfile syscall may be used to deliver +# files. This usually improves server performance, but must +# be turned off when serving from networked-mounted +# filesystems or if support for these functions is otherwise +# broken on your system. +# Defaults: EnableMMAP On, EnableSendfile Off +# +#EnableMMAP off +#EnableSendfile on + +# Supplemental configuration +# +# The configuration files in the conf/extra/ directory can be +# included to add extra features or to modify the default configuration of +# the server, or you may simply copy their contents here and change as +# necessary. 
+ +# Server-pool management (MPM specific) +#Include conf/extra/httpd-mpm.conf + +# Multi-language error messages +#Include conf/extra/httpd-multilang-errordoc.conf + +# Fancy directory listings +#Include conf/extra/httpd-autoindex.conf + +# Language settings +#Include conf/extra/httpd-languages.conf + +# User home directories +#Include conf/extra/httpd-userdir.conf + +# Real-time info on requests and configuration +#Include conf/extra/httpd-info.conf + +# Virtual hosts +Include conf/extra/httpd-vhosts.conf + +# Local access to the Apache HTTP Server Manual +#Include conf/extra/httpd-manual.conf + +# Distributed authoring and versioning (WebDAV) +#Include conf/extra/httpd-dav.conf + +# Various default settings +#Include conf/extra/httpd-default.conf + +# Configure mod_proxy_html to understand HTML4/XHTML1 + +Include conf/extra/proxy-html.conf + + +# Secure (SSL/TLS) connections +#Include conf/extra/httpd-ssl.conf +# +# Note: The following must must be present to support +# starting without SSL on platforms with no /dev/random equivalent +# but a statically compiled-in mod_ssl. +# + +SSLRandomSeed startup builtin +SSLRandomSeed connect builtin + + +# Inter-Process Session Cache: +# Configure the SSL Session Cache: First the mechanism +# to use and second the expiring timeout (in seconds). +SSLSessionCache shmcb:/run/httpd/sslcache(512000) +SSLSessionCacheTimeout 300 + +# Pseudo Random Number Generator (PRNG): +# Configure one or more sources to seed the PRNG of the +# SSL library. The seed data should be of good random quality. +# WARNING! On some platforms /dev/random blocks if not enough entropy +# is available. This means you then cannot use the /dev/random device +# because it would lead to very long connection times (as long as +# it requires to make more entropy available). But usually those +# platforms additionally provide a /dev/urandom device which doesn't +# block. So, if available, use this one instead. Read the mod_ssl User +# Manual for more details. +SSLRandomSeed startup file:/dev/urandom 256 +SSLRandomSeed connect builtin +#SSLRandomSeed startup file:/dev/random 512 +#SSLRandomSeed connect file:/dev/random 512 +#SSLRandomSeed connect file:/dev/urandom 512 + +# +# Use "SSLCryptoDevice" to enable any supported hardware +# accelerators. Use "openssl engine -v" to list supported +# engine names. NOTE: If you enable an accelerator and the +# server does not start, consult the error logs and ensure +# your accelerator is functioning properly. +# +SSLCryptoDevice builtin +#SSLCryptoDevice ubsec + +ServerTokens Prod +ServerSignature Off diff --git a/distros/dataverse.no/configs/microprofile-config.properties b/distros/dataverse.no/configs/microprofile-config.properties new file mode 100644 index 0000000..2ba9969 --- /dev/null +++ b/distros/dataverse.no/configs/microprofile-config.properties @@ -0,0 +1,42 @@ +# GENERAL +# Will be replaced by Maven property in /target via filtering (see ) +#dataverse.version=5.13 +#dataverse.build= + +# Default only for containers! 
(keep mimicking the current behaviour - +# changing that is part of https://github.com/IQSS/dataverse/issues/6636) +#%ct.dataverse.fqdn=localhost +#%ct.dataverse.siteUrl=http://${dataverse.fqdn}:8080 + +# FILES +dataverse.files.directory=/tmp/dataverse + +# SEARCH INDEX +dataverse.solr.host=localhost +# Activating mp config profile -Dmp.config.profile=ct changes default to "solr" as DNS name +%ct.dataverse.solr.host=solr +dataverse.solr.port=8983 +dataverse.solr.protocol=http +dataverse.solr.core=collection1 +dataverse.solr.path=/solr/${dataverse.solr.core} + +# DATABASE +#dataverse.db.host=localhost +#dataverse.db.port=5432 +#dataverse.db.user=dataverse +#dataverse.db.name=dataverse + +# RSERVE +dataverse.rserve.host=localhost +dataverse.rserve.port=6311 +dataverse.rserve.user=rserve +dataverse.rserve.password=rserve +dataverse.rserve.tempdir=/tmp/Rserv + +# OAI SERVER +dataverse.oai.server.maxidentifiers=100 +dataverse.oai.server.maxrecords=10 +dataverse.oai.server.maxsets=100 +# the OAI repository name, as shown by the Identify verb, +# can be customized via the setting below: +#dataverse.oai.server.repositoryname= diff --git a/distros/dataverse.no/configs/robots.txt b/distros/dataverse.no/configs/robots.txt new file mode 100644 index 0000000..804a067 --- /dev/null +++ b/distros/dataverse.no/configs/robots.txt @@ -0,0 +1,26 @@ +User-agent: * +# Note: In its current form, this sample robots.txt makes the site +# accessible to all the crawler bots (specified as "User-agent: *") +# It further instructs the bots to access and index the dataverse and dataset pages; +# it also tells them to stay away from all other pages (the "Disallow: /" line); +# and also not to follow any search links on a dataverse page. +# It is possible to specify different access rules for different bots. +# For example, if you only want to make the site accessed by Googlebot, but +# keep all the other bots away, un-comment out the following two lines: +#Disallow: / +#User-agent: Googlebot +Allow: /$ +Allow: /dataset.xhtml +Allow: /dataverse/ +Allow: /sitemap/ +# The following lines are for the facebook, twitter and linkedin preview bots: +Allow: /api/datasets/:persistentId/thumbnail +Allow: /javax.faces.resource/images/ +# Comment out the following TWO lines if you DON'T MIND the bots crawling the search API links on dataverse pages: +Disallow: /dataverse/*?q +Disallow: /dataverse/*/search +Disallow: / +# Crawl-delay specification *may* be honored by *some* bots. 
+# It is *definitely* ignored by Googlebot (they never promise to +# recognize it either - it's never mentioned in their documentation) +Crawl-delay: 20 diff --git a/distros/dataverse.no/configs/schema.xml b/distros/dataverse.no/configs/schema.xml new file mode 100644 index 0000000..5fe31aa --- /dev/null +++ b/distros/dataverse.no/configs/schema.xml @@ -0,0 +1,1546 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/distros/dataverse.no/configs/schema.xml.5.13 b/distros/dataverse.no/configs/schema.xml.5.13 new file mode 100644 index 0000000..f119386 --- /dev/null +++ b/distros/dataverse.no/configs/schema.xml.5.13 @@ -0,0 +1,1554 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
[1,554 lines of Solr schema.xml for Dataverse 5.13 (field and type definitions); the XML markup was stripped during extraction and only the uniqueKey value "id" survives]
diff --git a/distros/dataverse.no/docker-compose.yaml b/distros/dataverse.no/docker-compose.yaml
new file mode 100755
index 0000000..d9375fe
--- /dev/null
+++ b/distros/dataverse.no/docker-compose.yaml
@@ -0,0 +1,272 @@
+version: '3.7'
+# Settings and configurations that are common for all containers
+
+services:
+  #reverse-proxy:
+    # The official v2 Traefik docker image
+    # image: traefik:v2.10.3
+    # Enables the web UI and tells Traefik to listen to docker
+    # container_name: traefik
+    # command:
+    #   - "--api.insecure=true"
+    #   - "--providers.docker=true"
+    #   - "--providers.docker.exposedbydefault=false"
+    #   - "--providers.docker.network=traefik"
+    #   - "--entrypoints.web.address=:80"
+    #   - "--entrypoints.websecure.address=:443"
+    #   - "--entrypoints.web.http.redirections.entryPoint.to=websecure"
+    #   - "--entrypoints.web.http.redirections.entryPoint.scheme=https"
+    #   - "--providers.file.filename=/var/traefik2/certs/certificates.toml"
+    #   - "--providers.file.watch=true"
+    #   - "--log.level=DEBUG"
+    # hostname: ${hostname}
+    # networks:
+    #   - traefik
+    # ports:
+    #   - 80:80
+    #   - 9443:443
+    #   - 8090:8080
+    # volumes:
+    #   - /etc/localtime:/etc/localtime:ro
+    #   - /var/run/docker.sock:/var/run/docker.sock:ro
+    #   - "${CONFIGURATION_PATH}/configuration/files:/var/traefik2/certs"
+    #   - "${CONFIGURATION_PATH}/configuration/:/configuration/"
+    # labels:
+    #   - "traefik.enable=true"
+    #   - "traefik.frontend.rule=Host(`${traefikhost}`)"
+    #   - "traefik.port=8080"
+
+  #- "--entrypoints.web.http.redirections.entryPoint.priority=10" # disable permanent forwarding for every route
+
+  postgres:
+    networks:
+      - traefik
+    container_name: postgres
+
ports: + - "5433:5432" + image: postgres:15.3 + restart: unless-stopped + + environment: + - "LC_ALL=C.UTF-8" + - "POSTGRES_DB" + - "POSTGRES_USER" + - "POSTGRES_PASSWORD" + - "POSTGRES_PORT" + volumes: + - ${CONFIGURATION_PATH}/database-data:/var/lib/postgresql/data/ # persist data even if container shuts down + - ${POSTGRESTMP}/:/mnttmp/ + #- /extdisk/database-data-prod:/var/lib/postgresql/data/ + #- /extdisk/database-data-demo:/var/lib/postgresql/data/ + + + + shibboleth: + networks: + - traefik + #image: test03/shibboleth:3.3.0.B + image: ${DOCKER_HUB}/shibboleth:3.4.1-5 + container_name: shibboleth + hostname: shibboleth + privileged: true + ports: + - "8089:80" + - "443:443" + volumes: + - ${LOGS_PATH}/shibboleth/httpd:/var/log/httpd + # - ${LOGS_PATH}/shibboleth/shibboleth:/var/log/shibboleth + - ${CONFIGURATION_PATH}/shibboleth:/etc/shibboleth + # - ./configs/http-ssl.conf:/etc/httpd/conf.d/ssl.conf + - ./configs/http-ssl.conf:/usr/local/apache2/conf/httpd.conf + - ./configs/http-dataverse.conf:/usr/local/apache2/conf/extra/httpd-vhosts.conf + - ./configs/robots.txt:/var/www/robots.txt + - ${CONFIGURATION_PATH}/configuration/files/localhost.pem:/etc/ssl/certs/localhost.crt + - ${CONFIGURATION_PATH}/configuration/files/localhost.key:/etc/ssl/private/localhost.key + # hostname: ${hostname} + labels: + - "traefik.enable=true" + - "traefik.http.routers.shibboleth.rule=PathPrefix(`/Shibboleth.sso`, `/shibboleth-sp` )" + #- "traefik.tcp.routers.shibboleth.rule=Host(`${traefikhost}`) && PathPrefix(`/Shibboleth.sso`, `/shibboleth-sp` )" + - "traefik.http.routers.shibboleth.tls=true" + #- "traefik.http.services.shibboleth.loadbalancer.passhostheader=true" + - "traefik.http.services.shibboleth.loadbalancer.server.port=80" + #- "traefik.tcp.services.shibboleth.loadbalancer.server.port=80" + #- "traefik.http.services.shibboleth.loadbalancer.server.scheme=http" + #- "traefik.http.services.shibboleth.loadbalancer.server.url=${traefikhost}" + #- "traefik.http.middlewares.shibboleth.forwardauth.trustForwardHeader=true" + depends_on: + - dataverse + + solr: + networks: + - traefik + #image: solr:8.11.1 + image: ${DOCKER_HUB}/solr:8.9.0 + container_name: solr + privileged: true + ports: + - "8984:8983" + environment: + - "SOLR_HOST=solr" + - "SOLR_PORT=8983" + - "SOLR_JAVA_MEM=-Xms4g -Xmx4g" + - "SOLR_OPTS=-Dlog4j2.formatMsgNoLookups=true" + volumes: + - solr-data:/var/solr/data + # - ./configs/schema.xml:/var/solr/data/collection1/conf/schema.xml + - ./configs/schema.xml.5.13:/var/solr/data/collection1/conf/schema.xml + labels: + - "traefik.enable=true" + - "traefik.http.routers.solr.rule=Host(`solr.${traefikhost}`)" + - "traefik.http.services.solr.loadbalancer.server.port=8983" + - "traefik.http.routers.solr.tls=true" + #- "traefik.http.routers.solr.tls.certresolver=myresolver" + +# whoami: +# networks: +# - traefik +# image: "traefik/whoami" +# container_name: "whoami" +# labels: +# - "traefik.enable=true" +# # - "traefik.http.routers.whoami.entrypoints=web" +# - "traefik.http.routers.whoami.rule=Host(`${traefikhost}`) && PathPrefix(`/whoami`)" +# - "traefik.http.routers.whoami.tls=true" +# - "traefik.http.routers.whoami.tls.certresolver=myresolver" + + dataverse: + networks: + - traefik + image: ${DOCKER_HUB}/dataverse:${VERSION} + #image: coronawhy/dataverse:${VERSION} + container_name: dataverse + hostname: dataverse + privileged: true + user: + "root" + #ports: + #- "443:443" + #- "4849:4848" + #- "8088:8088" + #- "8080:8080" + #- "8099:8009" + #- "8080:8080" # HTTP (Dataverse Application) + 
#- "8181:8181" # HTTPS (Dataverse Application) + #- "4949:4848" # HTTPS (Payara Admin Console) + #- "8009:8009" # AJP + #- "9009:9009" # JDWP + #- "8686:8686" # JMX +# extra_hosts: +# - "${traefikhost}:51.105.181.173" + environment: + - "CVM_SERVER_NAME=CESSDA" #Optional + - "CESSDA" + - "CLARIN" + - "doi_authority" + - "doi_provider" + - "doi_username" + - "doi_password" + - "dataciterestapiurlstring" + - "baseurlstring" + - "aws_bucket_name" + - "aws_s3_profile" + - "aws_endpoint" + - "aws_endpoint_url" + - "aws_uit_bucket_name" + - "aws_uit_s3_profile" + - "azure_json_file" + - "orcid_json_file" + - "system_email" + - "mailhost" + - "mailuser" + - "no_reply_email" + - "support_email" + - "smtp_password" + - "smtp_port" + - "socket_port" + - "federated_json_file" + - "bucketname_1" + - "bucketname_2" + - "DATAVERSE_DB_HOST" + - "DATAVERSE_DB_USER" + - "DATAVERSE_DB_PASSWORD" + - "DATAVERSE_DB_NAME" + - "DATAVERSE_SERVICE_HOST" + - "DATAVERSE_URL" + - "BASEURL" + - "KEYWINDOWSBLOB" + - "SOLR_SERVICE_HOST" + - "SOLR_SERVICE_PORT" + - "CVM_SERVER_URL=https://ns.${traefikhost}" + #- "CVM_TSV_SOURCE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvocdemo.tsv" + #- "1WAR_FILE=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/dataverse-5.3-cv.war" + #- "WAR_FILE=https://github.com/IQSS/dataverse/releases/download/v5.3/dataverse-5.3.war" + #- "CVM_SQL=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cv-update.sql" + #- "CVM_CONFIG=https://github.com/IQSS/dataverse-docker/releases/download/5.3-cv/cvoc-conf.json" + - "LANG=en" + - "MAINLANG" + - "cvManager=http://" + - "BUNDLEPROPERTIES=Bundle.properties" + - "ADMIN_EMAIL=admin@localhost" + - "MAIL_SERVER=mailrelay" + - "SOLR_LOCATION=solr:8983" + - "INIT_SCRIPTS_FOLDER" + - "hostname" + - "PASSWORD_FILE" + - "POSTGRES_SERVER" + - "POSTGRES_PORT" + - "POSTGRES_DATABASE" + - "POSTGRES_USER" + - "POSTGRES_PASSWORD" + - "PGPASSWORD" + - "TESTBANNER" + - "TWORAVENS_LOCATION=NOT INSTALLED" + - "RSERVE_HOST=localhost" + - "RSERVE_PORT=6311" + - "RSERVE_USER=rserve" + - "RSERVE_PASSWORD=rserve" + - "JVM_OPTS='-Xmx4g -Xms4g -XX:MaxPermSize=4g -XX:PermSize=4g'" + - "WEBANALYTICSON" + - "COUNTERPROSVERSION" + - "GEOIPLICENSE" + - "CONFIG_FILE" + - "PAYARA_ARGS=--debug" + - "aws_config" + - "PREVIEWER" + depends_on: + - postgres + - solr + volumes: + - ${LOGS_PATH}/dataverse:/opt/payara/appserver/glassfish/domains/domain1/logs/ + - ${LOGS_PATH}/makeDataCount:/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount + - ${CONFIGURATION_PATH}/secrets:/secrets + - ${LOCAL_STORAGE}/data:/data + - ${DOCROOT}/docroot:/opt/payara/docroot + - ./configs/domain.xml:/opt/payara/domain.xml + - ./init.d:/opt/payara/init.d + - ./configs/microprofile-config.properties:/opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/META-INF/microprofile-config.properties + + # - /distrib/private/secrets/init_2_conf_payara.sh:/opt/payara/scripts/init_2_conf_payara.sh + - /mnt:/mnt + labels: + - "traefik.enable=true" + #- "traefik.http.routers.dataverse.rule=Host(`${traefikhost}`)" + - "traefik.http.routers.dataverse.rule=PathPrefix(`/`)" + - "traefik.http.services.dataverse.loadbalancer.server.port=8080" + - "traefik.http.routers.dataverse.tls=true" + #- "traefik.http.routers.dataverse.middlewares=shibboleth" + #- "traefik.http.routers.dataverse.tls.certresolver=myresolver" +volumes: + solr-data: +# data1-1: +# data1-2: +# data2-1: +# data2-2: +# data3-1: +# data3-2: +# data4-1: +# data4-2: + 
+networks:
+  traefik:
+    external: true
diff --git a/distros/dataverse.no/init.d/0000-preboot.sh b/distros/dataverse.no/init.d/0000-preboot.sh
new file mode 100755
index 0000000..f767066
--- /dev/null
+++ b/distros/dataverse.no/init.d/0000-preboot.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+echo > ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.storage-driver-id=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.type=s3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.label=S3" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.bucket-name=${aws_bucket_name}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.download-redirect=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.upload-redirect=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.url-expiration-minutes=120" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.connection-pool-size=4096" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.profile=${aws_s3_profile}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logStandardStreams=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "set-log-attributes com.sun.enterprise.server.logging.GFFileHandler.logtoFile=true" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+keyid=$(grep 'access key' $aws_config | awk -F ': ' {'print $2'};)
+secret_key=$(grep 'secret' $aws_config | awk -F ': ' {'print $2'};)
+endpoint=$aws_endpoint_url
+echo "create-system-properties dataverse.files.S3.access-key="$keyid >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.secret-key="$secret_key >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.S3.custom-endpoint-url=$endpoint" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#V5.13
+echo "create-system-properties dataverse.files.uploads=/opt/payara/appserver/glassfish/domains/domain1/uploads" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.siteUrl="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.files.directory=/tmp/dataverse">> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.port=6311" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.user=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.password=rserve" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.rserve.tempdir=/tmp/Rserv" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.mds-api-url=${baseurlstring}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.rest-api-url=${dataciterestapiurlstring}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.username=${doi_username}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+echo "create-system-properties dataverse.pid.datacite.password=${doi_password}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara
+#echo "create-system-properties dataverse.pid.handlenet.key.path=test" >>
${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.handlenet.key.passphrase=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.pid.handlenet.index=300" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.permalink.base-url="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.api-url="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.username=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.signposting.level1-author-limit=12" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.signposting.level1-item-limit=12" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.api.allow-incomplete-metadata="'https\:\/\/'${hostname} >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.ui.show-validity-filter=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.ui.allow-review-for-incomplete=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.spi.export.directory=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.mail.support-email=${support_email}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +echo "create-system-properties dataverse.mail.cc-support-on-contact-emails=${support_email}" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.netcdf.geo-extract-s3-direct-upload=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara +#echo "create-system-properties dataverse.pid.ezid.password=test" >> ${INIT_SCRIPTS_FOLDER}/preboot.payara diff --git a/distros/dataverse.no/init.d/003-counterprocessor.sh b/distros/dataverse.no/init.d/003-counterprocessor.sh new file mode 100755 index 0000000..dcd4e33 --- /dev/null +++ b/distros/dataverse.no/init.d/003-counterprocessor.sh @@ -0,0 +1,21 @@ + +#!/bin/bash + +mkdir /opt/payara/counter-processor +cd /opt/payara/counter-processor +wget https://github.com/CDLUC3/counter-processor/archive/v${COUNTERPROSVERSION}.tar.gz -O v${COUNTERPROSVERSION}.tar.gz +tar xvfz v${COUNTERPROSVERSION}.tar.gz +cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION} +curl "https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-Country&license_key=${GEOIPLICENSE}&suffix=tar.gz" -o GeoLite2-Country.tar.gz \ + && tar -xzvf GeoLite2-Country.tar.gz \ + && mv GeoLite2-Country_*/GeoLite2-Country.mmdb /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/maxmind_geoip + +wget https://guides.dataverse.org/en/latest/_downloads/a65ffc2dba9f406858591558ae92790c/setup-counter-processor.sh -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/setup-counter-processor.sh +wget https://guides.dataverse.org/en/latest/_downloads/fb16fe67897ad9fb85ec67bce5e6b83e/counter-processor-config.yaml -O /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION}/counter-processor-config.yaml + +curl -X PUT -d 
'/opt/payara/appserver/glassfish/domains/domain1/logs/makeDataCount' http://localhost:8080/api/admin/settings/:MDCLogPath +curl -X PUT -d 'false' http://localhost:8080/api/admin/settings/:DisplayMDCMetrics + +pip3 install -r requirements.txt --ignore-installed PyYAML +export ALLOWED_ENV=year_month + diff --git a/distros/dataverse.no/init.d/006-s3-aws-storage.sh b/distros/dataverse.no/init.d/006-s3-aws-storage.sh new file mode 100755 index 0000000..2ac349f --- /dev/null +++ b/distros/dataverse.no/init.d/006-s3-aws-storage.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# AWS Bucket for Dataverse +# https://guides.dataverse.org/en/latest/installation/config.html#id90 +if [ "${aws_bucket_name}" ]; then + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.type\=s3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.label\=S3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.bucket-name\=${aws_bucket_name}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.download-redirect\=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.upload-redirect=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=536870912" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.url-expiration-minutes\=120" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.connection-pool-size\=4096" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.storage-driver-id\=S3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.profile\=${aws_s3_profile}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.custom-endpoint-url\=${aws_endpoint_url}" + curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http" +fi diff --git a/distros/dataverse.no/init.d/007-s3-aws-storage.sh b/distros/dataverse.no/init.d/007-s3-aws-storage.sh new file mode 100755 index 0000000..fb1efa4 --- /dev/null +++ b/distros/dataverse.no/init.d/007-s3-aws-storage.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# AWS Bucket for Dataverse +# https://guides.dataverse.org/en/latest/installation/config.html#id90 +if [ "${aws_uit_bucket_name}" ]; then + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.type\=s3" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.label\=S3uit" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.bucket-name\=${aws_uit_bucket_name}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.download-redirect\=true" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.upload-redirect=true" + # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.min-part-size=53687091200" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.url-expiration-minutes\=120" + asadmin --user=${ADMIN_USER} 
--passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.connection-pool-size\=4096" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.storage-driver-id\=S3uit" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.profile\=${aws_uit_s3_profile}" + # asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3uit.custom-endpoint-url\=${aws_endpoint_url}" + curl -X PUT "http://localhost:8080/api/admin/settings/:DownloadMethods" -d "native/http" +fi diff --git a/distros/dataverse.no/init.d/01-persistent-id.sh b/distros/dataverse.no/init.d/01-persistent-id.sh new file mode 100755 index 0000000..6484e33 --- /dev/null +++ b/distros/dataverse.no/init.d/01-persistent-id.sh @@ -0,0 +1,26 @@ +#!/bin/bash +echo "Setting up the settings" >> /tmp/status.log +echo "- Allow internal signup" >> /tmp/status.log +SERVER=http://${DATAVERSE_URL}/api +echo $SERVER +curl -X PUT -d https://site.uit.no/dataverseno/support/ "$SERVER/admin/settings/:NavbarSupportUrl" +curl -X PUT -d http://site.uit.no/dataverseno/deposit/ "$SERVER/admin/settings/:NavbarGuidesUrl" +curl -X PUT -d https://site.uit.no/dataverseno/deposit/deposit-your-data/#log-in "$SERVER/admin/settings/:GuidesBaseUrl" +curl -X PUT -d 'false' "$SERVER/admin/settings/:AllowSignUp" +curl -X PUT -d /dataverseuser.xhtml?editMode=CREATE "$SERVER/admin/settings/:SignUpUrl" +curl -X PUT -d CV "$SERVER/admin/settings/:CV" +curl -X PUT -d burrito $SERVER/admin/settings/BuiltinUsers.KEY +curl -X PUT -d localhost-only $SERVER/admin/settings/:BlockedApiPolicy +curl -X PUT -d 'native/http' $SERVER/admin/settings/:UploadMethods +curl -X PUT -d solr:8983 "$SERVER/admin/settings/:SolrHostColonPort" +curl -X PUT -d "" "$SERVER/admin/settings/:Shoulder" +echo + +# Demo server with FAKE DOIs if doi_authority is empty +if [ -z "${doi_authority}" ]; then + curl -X PUT -d doi "$SERVER/admin/settings/:Protocol" + curl -X PUT -d 10.5072 "$SERVER/admin/settings/:Authority" +# curl -X PUT -d "FK2/" "$SERVER/admin/settings/:Shoulder" + curl -X PUT -d FAKE "$SERVER/admin/settings/:DoiProvider" +fi + diff --git a/distros/dataverse.no/init.d/010-mailrelay-set.sh b/distros/dataverse.no/init.d/010-mailrelay-set.sh new file mode 100755 index 0000000..4e6ddb9 --- /dev/null +++ b/distros/dataverse.no/init.d/010-mailrelay-set.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# Setup mail relay +# https://guides.dataverse.org/en/latest/developers/troubleshooting.html +if [ "${system_email}" ]; then + curl -X PUT -d ${system_email} http://localhost:8080/api/admin/settings/:SystemEmail + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} delete-javamail-resource mail/notifyMailSession + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-javamail-resource --mailhost ${mailhost} --mailuser ${mailuser} --fromaddress ${no_reply_email} --property mail.smtp.auth=false:mail.smtp.password=${smtp_password}:mail.smtp.port=${smtp_port}:mail.smtp.socketFactory.port=${socket_port}:mail.smtp.socketFactory.fallback=false mail/notifyMailSession +fi diff --git a/distros/dataverse.no/init.d/011-local-storage.sh b/distros/dataverse.no/init.d/011-local-storage.sh new file mode 100755 index 0000000..59c2602 --- /dev/null +++ b/distros/dataverse.no/init.d/011-local-storage.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +# Enable file folder in local storage +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options 
"-Ddataverse.files.file.type\=file" +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.label\=file" +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.file.directory\=/data" diff --git a/distros/dataverse.no/init.d/012-disable-imageMagick.sh b/distros/dataverse.no/init.d/012-disable-imageMagick.sh new file mode 100755 index 0000000..258b87c --- /dev/null +++ b/distros/dataverse.no/init.d/012-disable-imageMagick.sh @@ -0,0 +1,3 @@ +#!/bin/bash +mv /usr/bin/convert /usr/bin/convert.MOVED + diff --git a/distros/dataverse.no/init.d/021-jhove-set-link.sh b/distros/dataverse.no/init.d/021-jhove-set-link.sh new file mode 100755 index 0000000..6801cd0 --- /dev/null +++ b/distros/dataverse.no/init.d/021-jhove-set-link.sh @@ -0,0 +1,3 @@ +ln -s /opt/payara/dvinstall/jhove.conf /opt/payara/appserver/glassfish/domains/domain1/config/jhove.conf +ln -s /opt/payara/dvinstall/jhoveConfig.xsd /opt/payara/appserver/glassfish/domains/domain1/config/jhoveConfig.xsd + diff --git a/distros/dataverse.no/init.d/022-splitpath.sh b/distros/dataverse.no/init.d/022-splitpath.sh new file mode 100755 index 0000000..764420d --- /dev/null +++ b/distros/dataverse.no/init.d/022-splitpath.sh @@ -0,0 +1,3 @@ +#!/bin/bash + + #asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddataverse.files.S3.min-part-size=6553600" diff --git a/distros/dataverse.no/init.d/023-afilliation.sh b/distros/dataverse.no/init.d/023-afilliation.sh new file mode 100755 index 0000000..686b39a --- /dev/null +++ b/distros/dataverse.no/init.d/023-afilliation.sh @@ -0,0 +1,4 @@ +#!/bin/bash +curl -X PUT -d "affiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationAttribute +curl -X PUT -d "lastAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder +#curl -X PUT -d "firstAffiliation" http://localhost:8080/api/admin/settings/:ShibAffiliationOrder diff --git a/distros/dataverse.no/init.d/024-curation-lables.sh b/distros/dataverse.no/init.d/024-curation-lables.sh new file mode 100755 index 0000000..a962ebf --- /dev/null +++ b/distros/dataverse.no/init.d/024-curation-lables.sh @@ -0,0 +1,3 @@ +#!/bin/bash +#curl -X PUT -d '{"Standard Process":["Curator Assigned", "In Curation", "Awaiting Reply", "Legal or Ethical Concerns", "Awaiting Final Approval", "In Double Blind Review", "Awaiting Article Publication", "Candidate for Deletion"], "Alternate Process":["State 1","State 2","State 3"]}' http://localhost:8080/api/admin/settings/:AllowedCurationLabels +curl -X PUT -d 'STATUSUPDATED' http://localhost:8080/api/admin/settings/:AlwaysMuted diff --git a/distros/dataverse.no/init.d/03-doi-set.sh b/distros/dataverse.no/init.d/03-doi-set.sh new file mode 100755 index 0000000..29d3781 --- /dev/null +++ b/distros/dataverse.no/init.d/03-doi-set.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# Setup DOI parameters +# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring +if [ "${doi_authority}" ]; then + curl -X PUT -d ${doi_authority} http://localhost:8080/api/admin/settings/:Authority + curl -X PUT -d ${doi_provider} http://localhost:8080/api/admin/settings/:DoiProvider + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.username\=${doi_username}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.password\=${doi_password}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options 
"-Ddoi.dataciterestapiurlstring\=${dataciterestapiurlstring}" + asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-Ddoi.baseurlstring\=${baseurlstring}" + if [ "${doi_shoulder}" ]; then + curl -X PUT -d "${doi_shoulder}/" "$SERVER/admin/settings/:Shoulder" + fi +fi diff --git a/distros/dataverse.no/init.d/04-setdomain.sh b/distros/dataverse.no/init.d/04-setdomain.sh new file mode 100755 index 0000000..053252e --- /dev/null +++ b/distros/dataverse.no/init.d/04-setdomain.sh @@ -0,0 +1,12 @@ +#!/bin/bash +# Setup domain name +hostname=${hostname} +echo $hostname +hostnamecmd=dataverse.fqdn=${hostname} +echo $hostnamecmd +siteURLcmd=dataverse.siteUrl='https\:\/\/'${hostname} +echo $siteURLcmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $siteURLcmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-system-properties $hostnamecmd +asadmin --user=${ADMIN_USER} --passwordfile=${PASSWORD_FILE} create-jvm-options "-D$siteURLcmd" + diff --git a/distros/dataverse.no/init.d/055-dvwebloader.sh b/distros/dataverse.no/init.d/055-dvwebloader.sh new file mode 100755 index 0000000..a7ef293 --- /dev/null +++ b/distros/dataverse.no/init.d/055-dvwebloader.sh @@ -0,0 +1,4 @@ +#!/bin/bash +curl -X PUT -d 'native/http,dvwebloader' http://localhost:8080/api/admin/settings/:UploadMethods +#curl -X PUT -d 'https://gdcc.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl +curl -X PUT -d 'https://dataverseno.github.io/dvwebloader/src/dvwebloader.html' http://localhost:8080/api/admin/settings/:WebloaderUrl diff --git a/distros/dataverse.no/init.d/08-federated-login.sh b/distros/dataverse.no/init.d/08-federated-login.sh new file mode 100755 index 0000000..31208cd --- /dev/null +++ b/distros/dataverse.no/init.d/08-federated-login.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Federated login activation +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +if [ "${federated_json_file}" ]; then + curl -X POST -H 'Content-type: application/json' --upload-file ${federated_json_file} http://localhost:8080/api/admin/authenticationProviders +fi diff --git a/distros/dataverse.no/init.d/081-azure-login.sh b/distros/dataverse.no/init.d/081-azure-login.sh new file mode 100755 index 0000000..bfef879 --- /dev/null +++ b/distros/dataverse.no/init.d/081-azure-login.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Federated login activation +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +if [ "${azure_json_file}" ]; then + curl -X POST -H 'Content-type: application/json' --upload-file ${azure_json_file} http://localhost:8080/api/admin/authenticationProviders +fi diff --git a/distros/dataverse.no/init.d/082-orcid.sh b/distros/dataverse.no/init.d/082-orcid.sh new file mode 100644 index 0000000..6d97fa1 --- /dev/null +++ b/distros/dataverse.no/init.d/082-orcid.sh @@ -0,0 +1,8 @@ +#!/bin/bash + + +# Federated login activation +# https://guides.dataverse.org/en/latest/installation/shibboleth.html +if [ "${orcid_json_file}" ]; then + curl -X POST -H 'Content-type: application/json' --upload-file ${orcid_json_file} http://localhost:8080/api/admin/authenticationProviders +fi diff --git a/distros/dataverse.no/init.d/100-analytics.sh b/distros/dataverse.no/init.d/100-analytics.sh new file mode 100755 index 0000000..bede100 --- /dev/null +++ b/distros/dataverse.no/init.d/100-analytics.sh @@ -0,0 +1,6 @@ +curl -z -o $DOCROOT_DIR/analytics.xhtml 
https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/analytics.xhtml + +if [ ! -z "$WEBANALYTICSON" ] +then + curl -X PUT -d $DOCROOT_DIR/analytics.xhtml http://localhost:8080/api/admin/settings/:WebAnalyticsCode +fi diff --git a/distros/dataverse.no/init.d/101-header-footer-custumisation.sh b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh new file mode 100755 index 0000000..4fa5313 --- /dev/null +++ b/distros/dataverse.no/init.d/101-header-footer-custumisation.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/custom-header.html -O /tmp/custom-header.html +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/custom-footer.html -O /tmp/custom-footer.html + +curl -X PUT -d '/logos/navbar/logo.png' http://localhost:8080/api/admin/settings/:LogoCustomizationFile +#curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile +curl -X PUT -d '/tmp/custom-footer.html' http://localhost:8080/api/admin/settings/:FooterCustomizationFile +#curl -X PUT -d http://site.uit.no/dataverseno/deposit/ http://localhost:8080/api/admin/settings/:GuidesBaseUrl +#curl -X PUT -d '' http://localhost:8080/api/admin/settings/:GuidesVersion +curl -X PUT -d https://site.uit.no/dataverseno/support/ http://localhost:8080/api/admin/settings/:NavbarSupportUrl +curl -X PUT -d https://site.uit.no/dataverseno/about/policy-framework/access-and-use-policy/ http://localhost:8080/api/admin/settings/:ApplicationPrivacyPolicyUrl + +#file.dataFilesTab.metadata.header=Metadata + +if [ ! -z ${TESTBANNER+x} ]; + then + curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile + else + curl -X PUT -d '' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile + fi + diff --git a/distros/dataverse.no/init.d/201-bundle.sh b/distros/dataverse.no/init.d/201-bundle.sh new file mode 100755 index 0000000..004e259 --- /dev/null +++ b/distros/dataverse.no/init.d/201-bundle.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +/usr/bin/apt-get install patch -y +curl -z -o $DOCROOT_DIR/Bundle.properties.patch https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/Bundle.properties.patch +/usr/bin/patch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/propertyFiles/Bundle.properties $DOCROOT_DIR/Bundle.properties.patch diff --git a/distros/dataverse.no/init.d/204-custumisation.sh b/distros/dataverse.no/init.d/204-custumisation.sh new file mode 100755 index 0000000..6eb9af1 --- /dev/null +++ b/distros/dataverse.no/init.d/204-custumisation.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +#curl -X PUT -d '/logos/navbar/logo.png' http://localhost:8080/api/admin/settings/:LogoCustomizationFile +#curl -X PUT -d '/tmp/custom-header.html' http://localhost:8080/api/admin/settings/:HeaderCustomizationFile +#curl -X PUT -d '/tmp/custom-footer.html' http://localhost:8080/api/admin/settings/:FooterCustomizationFile + + +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/loginpage.xhtml -O /tmp/loginpage.xhtml +#wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_header.xhtml -O /tmp/dataverse_header.xhtml +#wget 
https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/modification/dataverse_footer.xhtml -O /tmp/dataverse_footer.xhtml + + +cp /tmp/loginpage.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/loginpage.xhtml +#cp /tmp/dataverse_header.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/dataverse_header.xhtml +#cp /tmp/dataverse_footer.xhtml /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/dataverse_footer.xhtml + diff --git a/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh b/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh new file mode 100755 index 0000000..17e049b --- /dev/null +++ b/distros/dataverse.no/init.d/205-backup-sql-fuctions.sh @@ -0,0 +1,4 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backup-fuction.sql +#select storageidentifier from dvobject where modificationtime>='2023-02-02'; diff --git a/distros/dataverse.no/init.d/211-microprofile.sh b/distros/dataverse.no/init.d/211-microprofile.sh new file mode 100644 index 0000000..4bf9373 --- /dev/null +++ b/distros/dataverse.no/init.d/211-microprofile.sh @@ -0,0 +1,4 @@ +#!/bin/bash +wget https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/configs/microprofile-config.properties -O /tmp/microprofile-config.properties +cp /tmp/microprofile-config.properties /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/WEB-INF/classes/META-INF/microprofile-config.properties +touch /opt/payara/appserver/glassfish/domains/domain1/applications/dataverse/.reload diff --git a/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp b/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp new file mode 100644 index 0000000..4db26fc Binary files /dev/null and b/distros/dataverse.no/init.d/affiliations/.updatetrigger.sql.swp differ diff --git a/distros/dataverse.no/init.d/affiliations/affiliation2data.py b/distros/dataverse.no/init.d/affiliations/affiliation2data.py new file mode 100644 index 0000000..c889325 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliation2data.py @@ -0,0 +1,36 @@ +import pandas as pd +import json + +# id | dvno_affiliation | dvno_group_name | dvno_email_level +#-----+-------------------------------------+------------------+------------------ +# 139 | UiT The Arctic University of Norway | uit.no | 2 +# 27 | Ostfold University College | hiof.no | 2 +# 4 | Akvaplan-niva | akvaplan.niva.no | 3 + +localfile = '/distrib/private/affiliations.csv' +URLaff = 'https://raw.githubusercontent.com/DataverseNO/dataverse-docker/dataverse.no/distros/dataverse.no/init.d/affiliations/affiliations.csv' + +#print(pd.read_csv(open(file, errors='replace'))) +def reload_affiliations(loc): + affiliations = pd.read_csv(loc) + for i in affiliations.index: + #print(affiliations.iloc[[i]]['dvno_group_name']) + #print("%s %s" % (affiliations.iloc[[i]]['dvno_group_name'].astype(str), affiliations.iloc[[i]]['dvno_affiliation'].astype(str))) + #print(str(affiliations.iloc[[i]]['id'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_group_name'].values[0])) + #print(str(affiliations.iloc[[i]]['dvno_affiliation'].values[0])) + dvno_email_level = len(str(affiliations.iloc[[i]]['dvno_group_name']).split('.')) + #print(subdomains) + affiliation = affiliations.iloc[[i]]['dvno_affiliation'].values[0] + affiliation = str(affiliation).replace("'", 
"\'\'") + #affiliation = "q['%s']" % affiliation + sql = "insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('%s', '%s', '%s', '%s');" % (affiliations.iloc[[i]]['id'].values[0], affiliation, affiliations.iloc[[i]]['dvno_group_name'].values[0], dvno_email_level) + print(sql) + return + +try: + reload_affiliations(URLaff) +except: + #print("URL %s doesn't exist\n" % URLaff) + reload_affiliations(localfile) + diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.csv b/distros/dataverse.no/init.d/affiliations/affiliations.csv new file mode 100644 index 0000000..c51d91e --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliations.csv @@ -0,0 +1,156 @@ +id,dvno_group_name,dvno_group_id,dvno_group_id_explicit,dvno_affiliation,dvno_abbreviation,dvno_alias,bucketname_in_cloudian,bucketname_in_dataverseno,feide_id,feide_email,feide_name,feide_nb,feide_nn,feide_en,feide_se,ror_id,ror_name,ror_acronym,comments +1,phs.no,phsno,&explicit/1-phsno,The Norwegian Police University College,PHS,,,,54,phs.no,The Norwegian Police University College,Politihøgskolen,Politihøgskulen,The Norwegian Police University College,,https://ror.org/05486d596,Norwegian Police University College,PHS, +2,uia.no,uiano,&explicit/1-uiano,University of Agder,UiA,uia,2002-red-dvno,cloudian-dvno,55,uia.no,University of Agder,Universitetet i Agder,Universitetet i Agder,University of Agder,Agder Universitehta,https://ror.org/03x297z98,University of Agder,, +3,nifu.no,nifuno,&explicit/1-nifuno,"Nordic Institute for Studies innovation, research and education",NIFU,,,,58,nifu.no,NIFU,NIFU,NIFU,NIFU,,,,, +4,hiof.no,hiofno,&explicit/1-hiofno,Østfold University College,HiØ,hiof,2002-red-dvno,cloudian-dvno,70,hiof.no,Østfold University College,Høgskolen i Østfold,Høgskulen i Østfold,Østfold University College,Østfold Allaskuvla,https://ror.org/04gf7fp41,Østfold University College,HiØ, +5,aho.no,ahono,&explicit/1-ahono,The Oslo School of Architecture and Design,AHO,,,,93,aho.no,The Oslo School of Architecture and Design,Arkitektur- og designhøgskolen i Oslo,Arkitektur- og designhøgskulen i Oslo,The Oslo School of Architecture and Design,,,,, +6,cmi.no,cmino,&explicit/1-cmino,Chr. Michelsen Institute,CMI,,,,96,cmi.no,Chr. Michelsen Institute,Chr. Michelsens Institutt,Chr. Michelsens Institutt,Chr. 
Michelsen Institute,,https://ror.org/02w7rbf39,Christian Michelsen Institute,CMI, +7,mf.no,mfno,&explicit/1-mfno,"MF Norwegian School of Theology, Religion and Society",MF,mf,2002-red-dvno,cloudian-dvno,100,mf.no,"MF Norwegian School of Theology, Religion and Society",MF vitenskapelig høyskole,MF vitenskapelig høyskole,"MF Norwegian School of Theology, Religion and Society",,https://ror.org/01qafy255,"MF Norwegian School of Theology, Religion and Society",, +8,dmmh.no,dmmhno,&explicit/1-dmmhno,Queen Maud University College,DMMH,,,,103,dmmh.no,Queen Maud University College,Dronning Mauds Minne Høgskole,Dronning Mauds Minne Høgskule,Queen Maud University College,,https://ror.org/043zemc40,Queen Maud University College,DMMH, +9,nhh.no,nhhno,&explicit/1-nhhno,Norwegian School of Economics,NHH,,,,119,nhh.no,Norwegian School of Economics,Norges Handelshøyskole,Norges Handelshøyskole,Norwegian School of Economics,,https://ror.org/04v53s997,Norwegian School of Economics,NHH, +10,nla.no,nlano,&explicit/1-nlano,NLA University College,NLA,,,,123,nla.no,NLA University College,NLA Høgskolen,NLA Høgskolen,NLA University College,NLA Høgskolen,https://ror.org/05fdt2q64,NLA University College,, +11,npolar.no,npolarno,&explicit/1-npolarno,Norwegian Polar Institute,NPI,,,,124,,Norwegian Polar Institute,Norsk Polarinstitutt,,Norwegian Polar Institute,,https://ror.org/03avf6522,Norwegian Polar Institute,, +12,nr.no,nrno,&explicit/1-nrno,Norwegian Computing Center,NR,,,,125,nr.no,Norsk Regnesentral,Norsk Regnesentral,,,,https://ror.org/02gm7te43,Norwegian Computing Center,NR, +13,sintef.no,sintefno,&explicit/1-sintefno,SINTEF,SINTEF,,,,131,sintef.no,SINTEF,SINTEF,SINTEF,SINTEF,,https://ror.org/01f677e56,SINTEF,, +14,samiskhs.no,samiskhsno,&explicit/1-samiskhsno,Sámi allaskuvla – Sámi University College,Sami,,,,133,samas.no,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,Sámi allaskuvla – Samisk høgskule,Sámi allaskuvla – Sámi University College,Sámi allaskuvla – Samisk høgskole,https://ror.org/028ahgk39,Sámi University of Applied Sciences,, +15,uib.no,uibno,&explicit/1-uibno,University of Bergen,UiB,uib,2002-red-dvno,cloudian-dvno,137,uib.no,University of Bergen,Universitetet i Bergen,Universitetet i Bergen,University of Bergen,Bergen Universitehta,https://ror.org/03zga2b32,University of Bergen,, +16,uio.no,uiono,&explicit/1-uiono,University of Oslo,UiO,uio,2002-red-dvno,cloudian-dvno,138,uio.no,University of Oslo,Universitetet i Oslo,Universitetet i Oslo,University of Oslo,Oslo Universitehta,https://ror.org/01xtthb56,University of Oslo,UiO, +17,uit.no,uitno,&explicit/1-uitno,UiT The Arctic University of Norway,UiT,uit,2002-red-dvno,cloudian-dvno,139,uit.no,UiT The Arctic University of Norway,UiT Norges arktiske universitet,UiT Noregs arktiske universitet,UiT The Arctic University of Norway,UiT Norgga árktalaš universitehta,https://ror.org/00wge5k78,UiT The Arctic University of Norway,UiT, +18,ntnu.no,ntnuno,&explicit/1-ntnuno,NTNU – Norwegian University of Science and Technology,NTNU,ntnu,2002-red-dvno,cloudian-dvno,140,ntnu.no,NTNU,NTNU,,NTNU,,https://ror.org/05xg72x27,Norwegian University of Science and Technology,NTNU, +19,nina.no,ninano,&explicit/1-ninano,Norwegian Institute for Nature Research,NINA,,,,144,nina.no,NINA,NINA,NINA,,,https://ror.org/04aha0598,Norwegian Institute for Nature Research,NINA, +20,ngu.no,nguno,&explicit/1-nguno,Geological Survey of Norway,NGU,,,,145,ngu.no,Geological Survey of Norway,Norges geologiske undersøkelse,Norges geologiske 
undersøkelse,Geological Survey of Norway,,,,, +21,himolde.no,himoldeno,&explicit/1-himoldeno,Molde University College,HiM,,,,158,himolde.no,Molde University College,Høgskolen i Molde,Høgskulen i Molde,Molde University College,Molde Allaskuvla,https://ror.org/00kxjcd28,Molde University College,HiM, +22,nb.no,nbno,&explicit/1-nbno,National Library of Norway,NB,,,,160,nb.no,National Library of Norway,Nasjonalbiblioteket,Nasjonalbiblioteket,National Library of Norway,,,,, +23,uis.no,uisno,&explicit/1-uisno,University of Stavanger,UiS,uis,2002-red-dvno,cloudian-dvno,163,uis.no,University of Stavanger,Universitetet i Stavanger,Universitetet i Stavanger,University of Stavanger,Stavanger Universitehta,https://ror.org/02qte9q33,University of Stavanger,UiS, +24,hivolda.no,hivoldano,&explicit/1-hivoldano,Volda University College,HVO,,,,165,hivolda.no,Volda University College,Høgskulen i Volda,Høgskulen i Volda,Volda University College,Volda Allaskuvla,https://ror.org/01eeqzy24,Volda University College,HVO, +25,khio.no,khiono,&explicit/1-khiono,Oslo National Academy of the Arts,KhiO,,,,195,khio.no,Oslo National Academy of the Arts,Kunsthøgskolen i Oslo,Kunsthøgskulen i Oslo,Oslo National Academy of the Arts,,https://ror.org/0543h9a62,Oslo National Academy of the Arts,, +26,samfunnsforskning.no,samfunnsforskningno,&explicit/1-samfunnsforskningno,Institute for Social Research,IFS,,,,197,samfunnsforskning.no,Institute for Social Research,Institutt for samfunnsforskning,,Institute for Social Research,,https://ror.org/05swz5441,Institute for Social Research,IFS, +27,ldh.no,ldhno,&explicit/1-ldhno,Lovisenberg Diaconal University College,LDH,,,,216,ldh.no,Lovisenberg diaconal university college,Lovisenberg diakonale høgskole,Lovisenberg diakonale høgskule,Lovisenberg diaconal university college,,https://ror.org/015rzvz05,Lovisenberg Diakonale Høgskole,LDH, +28,fhi.no,fhino,&explicit/1-fhino,Norwegian Institute of Public Health,NIPH,,,,310,fhi.no,Norwegian Institute of Public Health,Folkehelseinstituttet,,Norwegian Institute of Public Health,,https://ror.org/046nvst19,Norwegian Institute of Public Health,NIPH, +29,nih.no,nihno,&explicit/1-nihno,Norwegian School of Sport Sciences,NSSS,,,,323,nih.no,Norwegian School of Sport Sciences,Norges idrettshøgskole,Noregs idrettshøgskule,Norwegian School of Sport Sciences,,https://ror.org/045016w83,Norwegian School of Sport Sciences,NSSS, +30,bi.no,bino,&explicit/1-bino,BI Norwegian Business School,BI,,,,324,bi.no,BI Norwegian Business School,Handelshøyskolen BI,Handelshøyskolen BI,BI Norwegian Business School,,https://ror.org/03ez40v33,BI Norwegian Business School,, +31,nmh.no,nmhno,&explicit/1-nmhno,Norwegian Academy of Music,NMH,,,,325,nmh.no,Norwegian Academy of Music,Norges musikkhøgskole,Noregs musikkhøgskule,Norwegian Academy of Music,,https://ror.org/052dy9793,Norwegian Academy of Music,NMH, +32,kristiania.no,kristianiano,&explicit/1-kristianiano,Kristiania University College,Kristiania,,,,17007,feide.egms.no,Kristiania University College,Høyskolen Kristiania,Høyskolen Kristiania,Kristiania University College,,https://ror.org/03gss5916,Campus Kristiania,, +33,fhs.mil.no,fhsmilno,&explicit/1-fhsmilno,Norwegian Defence University College,NDUC,,,,115267,mil.no,Norwegian Defence University College,Forsvarets høgskoler,Forsvarets høgskuler,Norwegian Defence University College,,https://ror.org/02vfz9j23,Norwegian Defence University College,NDUC, +34,ansgarskolen.no,ansgarskolenno,&explicit/1-ansgarskolenno,Ansgar University 
College,Ansgar,,,,120177,ansgarhogskole.no,Ansgar University College,Ansgar høyskole,,Ansgar University College,,https://ror.org/05y8hw592,Ansgar Bibelskole,, +35,oslomet.no,oslometno,&explicit/1-oslometno,OsloMet – Oslo Metropolitan University,OsloMet,,,,120186,oslomet.no,OsloMet – Oslo Metropolitan University,OsloMet – storbyuniversitetet,OsloMet – storbyuniversitetet,OsloMet – Oslo Metropolitan University,OsloMet – stuorragávpotuniversitehta,https://ror.org/04q12yn84,OsloMet – Oslo Metropolitan University,HiOA, +36,nmbu.no,nmbuno,&explicit/1-nmbuno,Norwegian University of Life Sciences (NMBU),NMBU,nmbu,2002-red-dvno,cloudian-dvno,1777926,nmbu.no,Norwegian University of Life Sciences,Norges miljø- og biovitenskapelige universitet,Noregs miljø- og biovitenskapelige universitet,Norwegian University of Life Sciences,,https://ror.org/04a1mvv97,Norwegian University of Life Sciences,NMBU, +37,nibio.no,nibiono,&explicit/1-nibiono,Norwegian Institute of Bioeconomy Research,NIBIO,nibio,2002-red-dvno,cloudian-dvno,2052113,nibio.no,Nibio,Nibio,,,,https://ror.org/04aah1z61,Norwegian Institute of Bioeconomy Research,NIBIO, +38,vid.no,vidno,&explicit/1-vidno,VID Specialized University,VID,vid,2002-red-dvno,cloudian-dvno,2064538,vid.no,VID Specialized University,VID vitenskapelige høgskole,VID vitenskapelige høgskule,VID Specialized University,,https://ror.org/0191b3351,VID Specialized University,VID, +39,nord.no,nordno,&explicit/1-nordno,Nord University,NORD,nord,2002-red-dvno,cloudian-dvno,2066644,nord.no,Nord University,Nord universitet,Nord universitet,Nord University,,https://ror.org/030mwrt98,Nord University,, +40,usn.no,usnno,&explicit/1-usnno,University of South-Eastern Norway,USN,,,,2066647,usn.no,University of South-Eastern Norway,Universitetet i Sørøst-Norge,Universitetet i Søraust-Noreg,University of South-Eastern Norway,,https://ror.org/05ecg5h20,University of South-Eastern Norway,USN, +41,hvl.no,hvlno,&explicit/1-hvlno,Western Norway University of Applied Sciences,HVL,hvl,2002-red-dvno,cloudian-dvno,2126357,hvl.no,Western Norway University of Applied Sciences,Høgskulen på Vestlandet,Høgskulen på Vestlandet,Western Norway University of Applied Sciences,,https://ror.org/05phns765,Western Norway University of Applied Sciences,HVL, +42,nkvts.no,nkvtsno,&explicit/1-nkvtsno,Norwegian centre for violence and traumatic stress studies,NKVTS,,,,2127917,nkvts.no,Norwegian centre for violence and traumatic stress studies,Nasjonalt kunnskapssenter om vold og traumatisk stress,,Norwegian centre for violence and traumatic stress studies,,https://ror.org/01p618c36,Norwegian Centre for Violence and Traumatic Stress Studies,NKVTS, +43,inn.no,innno,&explicit/1-innno,Inland Norway University of Applied Sciences,INN,inn,2002-red-dvno,cloudian-dvno,2128215,inn.no,Inland Norway University of Applied Sciences,Høgskolen i Innlandet,,Inland Norway University of Applied Sciences,,https://ror.org/02dx4dc92,Inland Norway University of Applied Sciences,, +44,vetinst.no,vetinstno,&explicit/1-vetinstno,Norwegian Veterinary Institute,NVI,,,,2217125,vetinst.no,Veterinærinstituttet,Veterinærinstituttet,,,,https://ror.org/05m6y3182,Norwegian Veterinary Institute,NVI, +45,nubu.no,nubuno,&explicit/1-nubuno,NUBU - The Norwegian Center for Child Behavioral Development,NUBU,,,,2217221,nubu.no,NUBU - The Norwegian Center for Child Behavioral Development,NUBU - Nasjonalt utviklingssenter for barn og unge,,NUBU - The Norwegian Center for Child Behavioral Development,,,,, +46,hlsenteret.no,hlsenteretno,&explicit/1-hlsenteretno,The 
Norwegian Center for Holocaust and Minority Studies,HLS,,,,2217222,hlsenteret.no,The Norwegian Center for Holocaust and Minority Studies,Senter for studier av Holocaust og livssynsminoriteter,,The Norwegian Center for Holocaust and Minority Studies,,https://ror.org/03ppkyp25,Center for Studies of Holocaust and Religious Minorities,, +47,met.no,metno,&explicit/1-metno,Norwegian Meteorological Institute,MET,,,,2217341,,Meteorologisk Institutt,Meteorologisk Institutt,,,,https://ror.org/001n36p86,Norwegian Meteorological Institute,MET, +48,simula.no,simulano,&explicit/1-simulano,Simula Research Laboratory,Simula,,,,2217477,simula.no,Simula,Simula,,,,https://ror.org/00vn06n10,Simula Research Laboratory,, +49,agderforskning.no,agderforskningno,&explicit/1-agderforskningno,Agder Research,Agder,,,,,,,,,,,https://ror.org/02k3w5n89,Agder Research,, +50,akvaplan.niva.no,akvaplannivano,&explicit/1-akvaplannivano,Akvaplan-niva,Akvaplan,,,,,,,,,,,https://ror.org/03nrps502,Akvaplan-niva (Norway),, +51,arbark.no,arbarkno,&explicit/1-arbarkno,Norwegian Labour Movement Archives and Library,ARBARK,,,,,,,,,,,https://ror.org/05x91m338,Norwegian Labour Movement Archives and Library,, +52,cas.oslo.no,casoslono,&explicit/1-casoslono,Centre for Advanced Study,CAS,,,,,,,,,,,https://ror.org/05rbhea42,Centre for Advanced Study,CAS, +53,cicero.oslo.no,cicerooslono,&explicit/1-cicerooslono,CICERO Center for International Climate Research,CICERO,,,,,,,,,,,https://ror.org/01gw5dy53,CICERO Center for International Climate Research,CICERO, +54,cmr.no,cmrno,&explicit/1-cmrno,Christian Michelsen Research,CMR,,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +55,dataverse.no,dataverseno,&explicit/1-dataverseno,,_Ikkje-partnarar,root,2002-red-dvno,cloudian-dvno,,,,,,,,,,,The storage bucket 2002-red-dataverseno-dvno / cloudian-dvno is used for the root/top collection. 
+56,DataverseNO Admin,DataverseNOAdmin,&explicit/1-DataverseNOAdmin,,,,,,,,,,,,,,,, +57,DataverseNO Curator,DvNOCurator,&explicit/1-DvNOCurator,,,,,,,,,,,,,,,, +58,DataverseNO Dataset Creator,DataverseNODatasetCreator,&explicit/1-DataverseNODatasetCreator,,,,,,,,,,,,,,,, +59,diakonova.no,diakonovano,&explicit/1-diakonovano,Diakonova,Diakonova,,,,,,,,,,,,,, +60,fafo.no,fafono,&explicit/1-fafono,Fafo Foundation,Fafo,,,,,,,,,,,https://ror.org/00ee9xb13,Fafo Foundation,, +61,ffi.no,ffino,&explicit/1-ffino,Norwegian Defence Research Establishment,FFI,,,,,,,,,,,https://ror.org/0098gnz32,Norwegian Defence Research Establishment,FFI, +62,flymed.no,flymedno,&explicit/1-flymedno,Flymedisinsk institutt,Flymed,,,,,,,,,,,,,, +63,fni.no,fnino,&explicit/1-fnino,Fridtjof Nansen Institute,FNI,,,,,,,,,,,https://ror.org/04ep2t954,Fridtjof Nansen Institute,FNI, +64,genok.no,genokno,&explicit/1-genokno,GenØk – Centre for Biosafety,GenØk,,,,,,,,,,,https://ror.org/027arfy53,GenØk,, +65,hi.no,hino,&explicit/1-hino,Norwegian Institute of Marine Research,IMR,,,,,,,,,,,https://ror.org/05vg74d16,Norwegian Institute of Marine Research,IMR, +66,ife.no,ifeno,&explicit/1-ifeno,Institute for Energy Technology,IFE,,,,,,,,,,,https://ror.org/02jqtg033,Institute for Energy Technology,IFE, +67,iris.no,irisno,&explicit/1-irisno,International Research Institute of Stavanger,IRIS,,,,,,,,,,,https://ror.org/0502t5s28,International Research Institute of Stavanger,IRIS,Now part of NORCE Norwegian Research Centre +68,kifo.no,kifono,&explicit/1-kifono,"Institute for Church, Religion, and Worldview Research",KIFO,,,,,,,,,,,https://ror.org/051p4t773,"Institute for Church, Religion, and Worldview Research",KIFO, +69,kreftregisteret.no,kreftregisteretno,&explicit/1-kreftregisteretno,Cancer Registry of Norway,CRN,,,,,,,,,,,https://ror.org/03sm1ej59,Cancer Registry of Norway,CRN, +70,legeforeningen.no,legeforeningenno,&explicit/1-legeforeningenno,Den norske legeforening,Lege,,,,,,,,,,,,,, +71,moreforsk.no,moreforskno,&explicit/1-moreforskno,Møreforsking,Møre,,,,,,,,,,,https://ror.org/02w4kss89,Møreforsking (Norway),, +72,nersc.no,nerscno,&explicit/1-nerscno,Nansen Environmental and Remote Sensing Center,NERSC,,,,,,,,,,,,,, +73,nfms.no,nfmsno,&explicit/1-nfmsno,Aeromedical Center of Norway,NMFS,,,,,,,,,,,,,, +74,nforsk.no,nforskno,&explicit/1-nforskno,Nordland Research Institute,Nordland,,,,,,,,,,,https://ror.org/02wvb2a30,Nordland Research Institute,, +75,ngi.no,ngino,&explicit/1-ngino,Norwegian Geotechnical Institute,NGI,,,,,,,,,,,https://ror.org/032ksge37,Norwegian Geotechnical Institute,NGI, +76,niku.no,nikuno,&explicit/1-nikuno,Norwegian Institute for Cultural Heritage Research,NIKU,,,,,,,,,,,https://ror.org/02xhrye98,Norwegian Institute for Cultural Heritage Research,NIKU, +77,nilu.no,niluno,&explicit/1-niluno,Norwegian Institute for Air Research,NILU,,,,,,,,,,,https://ror.org/00q7d9z06,Norwegian Institute for Air Research,NILU, +78,niva.no,nivano,&explicit/1-nivano,Norwegian Institute for Water Research,NIVA,,,,,,,,,,,https://ror.org/03hrf8236,Norwegian Institute for Water Research,NIVA, +79,nlr.no,nlrno,&explicit/1-nlrno,Norsk Landbruksrådgiving,NLR,,,,,,,,,,,https://ror.org/03c1zct07,Norsk Landbruksrådgiving,NLR, +80,nobel.no,nobelno,&explicit/1-nobelno,Norwegian Nobel Institute,Nobel,,,,,,,,,,,https://ror.org/055wgnw59,Norwegian Nobel Institute,, +81,nofima.no,nofimano,&explicit/1-nofimano,Nofima,Nofima,nofima,2002-red-dvno,cloudian-dvno,,,,,,,,https://ror.org/02v1rsx93,Nofima,, 
+82,norceresearch.no,norceresearchno,&explicit/1-norceresearchno,NORCE Norwegian Research Centre,NORCE,,,,,,,,,,,https://ror.org/02gagpf75,Norwegian Research Centre,NORCE, +83,norsar.no,norsarno,&explicit/1-norsarno,Norwegian Seismic Array,NORSAR,,,,,,,,,,,https://ror.org/02vw8cm83,Norsar,, +84,norsok.no,norsokno,&explicit/1-norsokno,Norwegian Centre for Organic Agriculture,NORSØK,,,,,,,,,,,,,, +85,norsus.no,norsusno,&explicit/1-norsusno,Norwegian Institute for Sustainability Research,NORSUS,,,,,,,,,,,,,, +86,norut.no,norutno,&explicit/1-norutno,Norut Northern Research Institute,Norut,,,,,,,,,,,,,,Now part of NORCE Norwegian Research Centre +87,nupi.no,nupino,&explicit/1-nupino,Norwegian Institute of International Affairs,NUPI,,,,,,,,,,,https://ror.org/01pznaa94,Norwegian Institute of International Affairs,NUPI, +88,ostfoldforskning.no,ostfoldforskningno,&explicit/1-ostfoldforskningno,Ostfold Research,Østfold,,,,,,,,,,,https://ror.org/01vmqaq17,Ostfold Research (Norway),,Has changed name to +89,ostforsk.no,ostforskno,&explicit/1-ostforskno,Eastern Norway Research Institute,ENRI,,,,,,,,,,,https://ror.org/020deqr47,Eastern Norway Research Institute,ENRI, +90,pfi.no,pfino,&explicit/1-pfino,Paper and Fibre Research Institute,PFI,,,,,,,,,,,https://ror.org/053qb6g74,Paper and Fibre Research Institute,PFI, +91,prio.org,prioorg,&explicit/1-prioorg,Peace Research Institute,PRIO,,,,,,,,,,,https://ror.org/04dx54y73,Peace Research Institute,PRIO, +92,risefr.no,risefrno,&explicit/1-risefrno,RISE Fire Research,RISE,,,,,,,,,,,,,, +93,ruralis.no,ruralisno,&explicit/1-ruralisno,Institute for Rural and Regional Research,CRR,,,,,,,,,,,https://ror.org/0169gd037,Centre for Rural Research,CRR,Note: The ROR entry is not up to date. +94,sik.no,sikno,&explicit/1-sikno,Centre for Intercultural Communication,SIK,,,,,,,,,,,,,,Now part of VID +95,snf.no,snfno,&explicit/1-snfno,Centre for Applied Research,SNF,,,,,,,,,,,,,, +96,stami.no,stamino,&explicit/1-stamino,National Institute of Occupational Health,NIOH,,,,,,,,,,,https://ror.org/04g3t6s80,National Institute of Occupational Health,NIOH, +97,teknova.no,teknovano,&explicit/1-teknovano,Teknova,Teknova,,,,,,,,,,,https://ror.org/02ekw8p73,Teknova,,Now part of NORCE Norwegian Research Centre +98,tel-tek.no,tel-tekno,&explicit/1-tel-tekno,Tel-Tek,Tel-Tek,,,,,,,,,,,,,,Now part of SINTEF +99,tfou.no,tfouno,&explicit/1-tfouno,Trøndelag Forskning og Utvikling,TFOU,,,,,,,,,,,https://ror.org/01hw8wm79,Trøndelag Forskning og Utvikling (Norway),TFOU,Now part of SINTEF +100,tisip.no,tisipno,&explicit/1-tisipno,TISIP,TISIP,,,,,,,,,,,,,, +101,tmforsk.no,tmforskno,&explicit/1-tmforskno,Telemark Research Institute,TRI,,,,,,,,,,,https://ror.org/02jjgkb92,Telemark Research Institute,TRI, +102,toi.no,toino,&explicit/1-toino,Institute of Transport Economics,TØI,,,,,,,,,,,https://ror.org/04p2pa451,Institute of Transport Economics,TØI, +103,treteknisk.no,tretekniskno,&explicit/1-tretekniskno,Norwegian Institute of Wood Technology,NTI,,,,,,,,,,,https://ror.org/00d5qfz16,Norwegian Institute of Wood Technology,NTI, +104,uni.no,unino,&explicit/1-unino,Uni Research,Uni,,,,,,,,,,,https://ror.org/016tr2j79,Uni Research (Norway),,Now part of NORCE Norwegian Research Centre +105,vestforsk.no,vestforskno,&explicit/1-vestforskno,Western Norway Research Institute,WRNI,,,,,,,,,,,https://ror.org/04z1q2j11,Vestlandsforsking,WRNI, +106,westerdals.no,westerdalsno,&explicit/1-westerdalsno,"Westerdals Oslo School of Arts, Communication and 
Technology",Westerdals,,,,,,,,,,,https://ror.org/02re25503,"Westerdals Oslo School of Arts, Communication and Technology",,Now part of Kristiania +107,unn.no,unnno,&explicit/1-unnno,University Hospital of North Norway,UNN,,,,,,,,,,,https://ror.org/030v5kp38,University Hospital of North Norway,UNN, +108,helse-vest.no,helse-vestno,&explicit/1-helse-vestno,Western Norway Regional Health Authority,Helse Vest,,,,,,,,,,,https://ror.org/001212e83,Western Norway Regional Health Authority,, +109,helse-forde.no,helse-fordeno,&explicit/1-helse-fordeno,Helse Førde,Helse Førde,,,,,,,,,,,https://ror.org/05dzsmt79,Helse Førde,, +110,helse-bergen.no,helse-bergenno,&explicit/1-helse-bergenno,Helse Bergen,Helse Bergen,,,,,,,,,,,,,, +111,helse-fonna.no,helse-fonnano,&explicit/1-helse-fonnano,Helse Fonna,Helse Fonna,,,,,,,,,,,,,, +112,sus.no,susno,&explicit/1-susno,Stavanger University Hospital,SUS,,,,,,,,,,,https://ror.org/04zn72g03,Stavanger University Hospital,SUS, +113,helse-midt.no,helse-midtno,&explicit/1-helse-midtno,Central Norway Regional Health Authority,Helse Midt,,,,,,,,,,,https://ror.org/04t838f48,Central Norway Regional Health Authority,, +114,helse-mr.no,helse-mrno,&explicit/1-helse-mrno,Helse Møre og Romsdal,Helse MR,,,,,,,,,,,https://ror.org/05ka2ew29,Helse Møre og Romsdal,, +115,stolav.no,stolavno,&explicit/1-stolavno,St Olav's University Hospital,St. Olav,,,,,,,,,,,https://ror.org/01a4hbq44,St Olav's University Hospital,, +116,hnt.no,hntno,&explicit/1-hntno,Helse Nord-Trøndelag,Helse NT,,,,,,,,,,,,,, +117,helse-nord.no,helse-nordno,&explicit/1-helse-nordno,Northern Norway Regional Health Authority,Helse Nord,,,,,,,,,,,https://ror.org/05f6c0c45,Northern Norway Regional Health Authority,, +118,helgelandssykehuset.no,helgelandssykehusetno,&explicit/1-helgelandssykehusetno,Helgelandssykehuset,Helgeland,,,,,,,,,,,,,, +119,finnmarkssykehuset.no,finnmarkssykehusetno,&explicit/1-finnmarkssykehusetno,Finnmarkssykehuset,Finnmark,,,,,,,,,,,https://ror.org/04z1ebj23,Finnmarkssykehuset,, +120,nordlandssykehuset.no,nordlandssykehusetno,&explicit/1-nordlandssykehusetno,Nordland Hospital Trust,Nordland,,,,,,,,,,,https://ror.org/04wjd1a07,Nordland Hospital Trust,, +121,helse-sorost.no,helse-sorostno,&explicit/1-helse-sorostno,Southern and Eastern Norway Regional Health Authority,Helse SØ,,,,,,,,,,,https://ror.org/02qx2s478,Southern and Eastern Norway Regional Health Authority,, +122,ahus.no,ahusno,&explicit/1-ahusno,Akershus University Hospital,Ahus,,,,,,,,,,,https://ror.org/0331wat71,Akershus University Hospital,, +123,oslo-universitetssykehus.no,oslo-universitetssykehusno,&explicit/1-oslo-universitetssykehusno,Oslo University Hospital,Oslo,,,,,,,,,,,https://ror.org/00j9c2840,Oslo University Hospital,, +124,fjellhaug.no,fjellhaugno,&explicit/1-fjellhaugno,Fjellhaug International University College,FIUC,,,,117700,fjellhaug.no,Fjellhaug International University College,Fjellhaug Internasjonale Høgskole,Fjellhaug Internasjonale Høgskule,Fjellhaug International University College,,https://ror.org/00j9c2840,Fjellhaug International University College,FIH, +125,vea-fs.no,vea-fsno,&explicit/1-vea-fsno,Norway’s green vocational school,Vea,,,,,,,,,,,,,, +126,bdm.no,bdmno,&explicit/1-bdmno,Barratt Due Institute of Music,BDM,,,,,,,,,,,https://ror.org/05dqc2261,Barratt Due,, +127,bas.org,basorg,&explicit/1-basorg,Bergen School of Architecture,BAS,,,,,,,,,,,https://ror.org/00g8zjy95,Bergen School of Architecture,, +128,steinerhoyskolen.no,steinerhoyskolenno,&explicit/1-steinerhoyskolenno,Rudolf Steiner University 
College,Steiner,,,,,,,,,,,https://ror.org/00kxk0k30,Rudolf Steiner University College,, +129,amh.no,amhno,&explicit/1-amhno,Atlantis Medisinske Høgskole,AMH,,,,,,,,,,,,,, +130,hgut.no,hgutno,&explicit/1-hgutno,Høgskulen for grøn utvikling,HGUt,,,,,,,,,,,,,, +131,hfdk.no,hfdkno,&explicit/1-hfdkno,Høyskolen for dansekunst,HFDK,,,,,,,,,,,,,, +132,hlt.no,hltno,&explicit/1-hltno,Norwegian School of Leadership and Theology,HLT,,,,,,,,,,,,,, +133,hfy.no,hfyno,&explicit/1-hfyno,University College of Vocational Education,HØFY,,,,,,,,,,,https://ror.org/04r8kt465,University College of Vocational Education,, +134,krus.no,krusno,&explicit/1-krusno,University College of Norwegian Correctional Service,KRUS,,,,2169824,krus.no,Kriminalomsorgens høgskole og utdanningssenter KRUS,Kriminalomsorgens høgskole og utdanningssenter KRUS,,,,https://ror.org/020mpkg22,University College of Norwegian Correctional Service,, +135,limpimusic.com,limpimusiccom,&explicit/1-limpimusiccom,Lillehammer Institute of Music Production and Industries,Limpi,,,,,,,,,,,,,, +136,noroff.no,noroffno,&explicit/1-noroffno,Noroff School of technology and digital media,Noroff,,,,,,,,,,,,,, +137,barnebokinstituttet.no,barnebokinstituttetno,&explicit/1-barnebokinstituttetno,The Norwegian Institute for Children’s Books,NBI,,,,,,,,,,,https://ror.org/03s2agk53,The Norwegian Institute for Children’s Books,, +138,gestalt.no,gestaltno,&explicit/1-gestaltno,Norwegian Gestalt Institute,NGI,,,,,,,,,,,,,, +139,nski.no,nskino,&explicit/1-nskino,NSKI University College,NSKI,,,,,,,,,,,,,, +140,oslonh.no,oslonhno,&explicit/1-oslonhno,Oslo New University College,ONH,,,,,,,,,,,,,, +141,skrivekunst.no,skrivekunstno,&explicit/1-skrivekunstno,Skrivekunstakademiet,Skrivekunst,,,,,,,,,,,,,, +142,unis.no,unisno,&explicit/1-unisno,University Centre in Svalbard,UNIS,,,,61,unis.no,The University Center in Svalbard,Universitetssenteret på Svalbard,,The University Center in Svalbard,,https://ror.org/03cyjf656,University Centre in Svalbard,UNIS, +143,rise-pfi.no,rise-pfino,&explicit/1-rise-pfino,RISE PFI,RISE PFI,,,,,,,,,,,,,, +144,aquateam.no,aquateamno,&explicit/1-aquateamno,Aquateam COWI,Aquateam,,,,,,,,,,,,,, +145,dsa.no,dsano,&explicit/1-dsano,Norwegian Radiation and Nuclear Safety Authority,DSA,,,,,,,,,,,https://ror.org/039kcn609,Norwegian Radiation and Nuclear Safety Authority,DSA, +146,arkivverket.no,arkivverketno,&explicit/1-arkivverketno,National Archives of Norway,Arkivverket,,,,,,,,,,,,,, +147,niom.no,niomno,&explicit/1-niomno,Nordic Institute of Dental Materials,NIOM,,,,,,,,,,,https://ror.org/015xbps36,Nordic Institute of Dental Materials,, +148,norges-bank.no,norges-bankno,&explicit/1-norges-bankno,Central Bank of Norway,NB,,,,,,,,,,,https://ror.org/01v408m73,Central Bank of Norway,, +149,nve.no,nveno,&explicit/1-nveno,Norwegian Water Resources and Energy Directorate,NVE,,,,,,,,,,,https://ror.org/02syy7986,Norwegian Water Resources and Energy Directorate,, +150,norner.no,nornerno,&explicit/1-nornerno,Norner,Norner,,,,,,,,,,,https://ror.org/05ew68y43,Norner,, +151,norskfolkemuseum.no,norskfolkemuseumno,&explicit/1-norskfolkemuseumno,Norsk Folkemuseum,Norsk Folkemuseum,,,,,,,,,,,https://ror.org/02t6kpd72,Norsk Folkemuseum,, +152,kartverket.no,kartverketno,&explicit/1-kartverketno,Norwegian Mapping Authority,NMA,,,,,,,,,,,https://ror.org/05dz27378,Norwegian Mapping Authority,, +153,ssb.no,ssbno,&explicit/1-ssbno,Statistics Norway,SSB,,,,,,,,,,,https://ror.org/02e50va28,Statistics Norway,, +154,arkivet.no,arkivetno,&explicit/1-arkivetno,ARKIVET Peace and Human 
Rights Centre,ARKIVET,,,,,,,,,,,,,, +155,tekniskmuseum.no,tekniskmuseumno,&explicit/1-tekniskmuseumno,Norwegian Museum of Science and Technology,,,,,,,,,,,,https://ror.org/00zave958,Norwegian Museum of Science and Technology,, diff --git a/distros/dataverse.no/init.d/affiliations/affiliations.sql b/distros/dataverse.no/init.d/affiliations/affiliations.sql new file mode 100644 index 0000000..0f3f8a3 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/affiliations.sql @@ -0,0 +1,107 @@ +truncate table dvnoaffiliations; +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('1', 'The Norwegian Police University College', 'phs.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('2', 'University of Agder', 'uia.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('3', 'NIFU', 'nifu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('4', 'Østfold University College', 'hiof.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('5', 'The Oslo School of Architecture and Design', 'aho.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('6', 'Chr. Michelsen Institute', 'cmi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('7', 'MF Norwegian School of Theology, Religion and Society', 'mf.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('8', 'Queen Maud University College', 'dmmh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('9', 'Norwegian School of Economics', 'nhh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('10', 'NLA University College', 'nla.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('11', 'Norwegian Polar Institute', 'npolar.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('12', 'Norwegian Computing Center', 'nr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('13', 'SINTEF', 'sintef.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('14', 'Sámi allaskuvla – Sámi University College', 'samiskhs.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('15', 'University of Bergen', 'uib.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('16', 'University of Oslo', 'uio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('17', 'UiT The Arctic University of Norway', 'uit.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('18', 'Norwegian University of Science and Technology', 'ntnu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('19', 'Norwegian Institute for Nature Research', 'nina.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('20', 'Geological Survey of Norway', 'ngu.no', '2'); +insert 
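+-- (Editorial note, not in the original file: dvno_email_level appears to be the number of domain labels
+--  matched against a user's email domain by the affiliation triggers further below, e.g. 2 for 'uit.no'
+--  and 3 for 'cas.oslo.no' or 'fhs.mil.no'. This reading is an inference from the data, not documented.)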
into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('21', 'Molde University College', 'himolde.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('22', 'National Library of Norway', 'nb.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('23', 'University of Stavanger', 'uis.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('24', 'Volda University College', 'hivolda.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('25', 'Oslo National Academy of the Arts', 'khio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('26', 'Institute for Social Research', 'samfunnsforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('27', 'Lovisenberg Diaconal University College', 'ldh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('28', 'Norwegian Institute of Public Health', 'fhi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('29', 'Norwegian School of Sport Sciences', 'nih.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('30', 'BI Norwegian Business School', 'bi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('31', 'Norwegian Academy of Music', 'nmh.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('32', 'Kristiania University College', 'kristiania.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('33', 'Norwegian Defence University College', 'fhs.mil.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('34', 'Ansgar University College', 'ansgarskolen.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('35', 'OsloMet – Oslo Metropolitan University', 'oslomet.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('36', 'Norwegian University of Life Sciences', 'nmbu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('37', 'Norwegian Institute of Bioeconomy Research', 'nibio.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('38', 'VID Specialized University', 'vid.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('39', 'Nord University', 'nord.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('40', 'University of South-Eastern Norway', 'usn.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('41', 'Western Norway University of Applied Sciences', 'hvl.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('42', 'Norwegian centre for violence and traumatic stress studies', 'nkvts.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('43', 'Inland Norway 
University of Applied Sciences', 'inn.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('44', 'Norwegian Veterinary Institute', 'vetinst.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('45', 'NUBU - The Norwegian Center for Child Behavioral Development', 'nubu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('46', 'The Norwegian Center for Holocaust and Minority Studies', 'hlsenteret.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('47', 'Norwegian Meteorological Institute', 'met.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('48', 'Simula Research Laboratory', 'simula.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('49', 'Agder Research', 'agderforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('50', 'Akvaplan-niva', 'akvaplan.niva.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('51', 'Norwegian Labour Movement Archives and Library', 'arbark.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('52', 'Centre for Advanced Study', 'cas.oslo.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('53', 'CICERO Center for International Climate Research', 'cicero.oslo.no', '3'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('54', 'Christian Michelsen Research', 'cmr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('55', 'nan', 'dataverse.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('56', 'nan', 'DataverseNO Admin', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('57', 'nan', 'DataverseNO Curator', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('58', 'nan', 'DataverseNO Dataset Creator', '1'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('59', 'Diakonova', 'diakonova.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('60', 'Fafo Foundation', 'fafo.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('61', 'Norwegian Defence Research Establishment', 'ffi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('62', 'Flymedisinsk institutt', 'flymed.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('63', 'Fridtjof Nansen Institute', 'fni.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('64', 'GenØk – Centre for Biosafety', 'genok.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('65', 'Norwegian Institute of Marine Research', 'hi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('66', 'Institute for Energy Technology', 
'ife.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('67', 'International Research Institute of Stavanger', 'iris.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('68', 'Institute for Church, Religion, and Worldview Research', 'kifo.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('69', 'Cancer Registry of Norway', 'kreftregisteret.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('70', 'nan', 'legeforeningen.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('71', 'Møreforsking', 'moreforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('72', 'Nansen Environmental and Remote Sensing Center', 'nersc.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('73', 'Aeromedical Center of Norway', 'nfms.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('74', 'Nordland Research Institute', 'nforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('75', 'Norwegian Geotechnical Institute', 'ngi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('76', 'Norwegian Institute for Cultural Heritage Research', 'niku.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('77', 'Norwegian Institute for Air Research', 'nilu.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('78', 'Norwegian Institute for Water Research', 'niva.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('79', 'Norsk Landbruksrådgiving', 'nlr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('80', 'Norwegian Nobel Institute', 'nobel.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('81', 'Nofima', 'nofima.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('82', 'Norwegian Research Centre', 'norceresearch.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('83', 'Norwegian Seismic Array', 'norsar.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('84', 'Norsk senter for økologisk landbruk', 'norsok.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('85', 'Norwegian Institute for Sustainability Research', 'norsus.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('86', 'nan', 'norut.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('87', 'Norwegian Institute of International Affairs', 'nupi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('88', 'Ostfold Research', 'ostfoldforskning.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('89', 'Eastern Norway Research 
Institute', 'ostforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('90', 'Paper and Fibre Research Institute', 'pfi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('91', 'Peace Research Institute', 'prio.org', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('92', 'RISE Fire Research', 'risefr.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('93', 'Institute for Rural and Regional Research', 'ruralis.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('94', 'Centre for Intercultural Communication', 'sik.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('95', 'Centre for Applied Research', 'snf.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('96', 'National Institute of Occupational Health', 'stami.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('97', 'Teknova', 'teknova.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('98', 'Tel-Tek', 'tel-tek.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('99', 'Trøndelag Forskning og Utvikling', 'tfou.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('100', 'TISIP', 'tisip.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('101', 'Telemark Research Institute', 'tmforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('102', 'Institute of Transport Economics', 'toi.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('103', 'Norwegian Institute of Wood Technology', 'treteknisk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('104', 'Uni Research', 'uni.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('105', 'Western Norway Research Institute', 'vestforsk.no', '2'); +insert into dvnoaffiliations (id, dvno_affiliation, dvno_group_name, dvno_email_level) values ('106', 'Westerdals Oslo School of Arts, Communication and Technology', 'westerdals.no', '2'); diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql new file mode 100644 index 0000000..6c8fd47 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql @@ -0,0 +1,19 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; + +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select 
dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); +RETURN NULL; +END; +$$ diff --git a/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 new file mode 100644 index 0000000..1dc9f7b --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/builtinuser_trigger.sql.2 @@ -0,0 +1,2 @@ +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.builtinuser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); +--CREATE TRIGGER affiliation_trigger_update AFTER UPDATE ON public.authenticateduser FOR EACH ROW WHEN (OLD.emailconfirmed is not null) EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/extratrigger.sql b/distros/dataverse.no/init.d/affiliations/extratrigger.sql new file mode 100644 index 0000000..e2396f8 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/extratrigger.sql @@ -0,0 +1,21 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; + +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations); + +RETURN NULL; +END; +$$; +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/extratrigger.sql.2 b/distros/dataverse.no/init.d/affiliations/extratrigger.sql.2 new file mode 100644 index 0000000..397b335 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/extratrigger.sql.2 @@ -0,0 +1 @@ +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/extratrigger.sql.3 b/distros/dataverse.no/init.d/affiliations/extratrigger.sql.3 new file mode 100644 index 0000000..5423342 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/extratrigger.sql.3 @@ -0,0 +1,12 @@ +CREATE OR REPLACE FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select 
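+-- (Editorial sketch, assumption: like the other SQL in this distro, these affiliation trigger files are
+--  loaded into the dataverse database with psql, for example
+--    psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/builtinuser_trigger.sql
+--  followed by the matching *.sql.2 file that creates the trigger itself. The exact path and invocation are
+--  assumed, mirroring backupfiles.sh and _dvno_geolocation_cleaning.sh elsewhere in this change.)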
dvno_group_name from dvnoaffiliations); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations); + +RETURN NULL; +END; +$$ + diff --git a/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql b/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql new file mode 100644 index 0000000..9383859 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/groupuser_trigger.sql @@ -0,0 +1,28 @@ +DROP TRIGGER IF EXISTS group_trigger on authenticateduser; +DROP FUNCTION IF EXISTS public.groupmonitor() CASCADE; + +CREATE FUNCTION public.groupmonitor() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN insert into explicitgroup_authenticateduser + select e.id, a.id from explicitgroup as e, authenticateduser as a + + + where emailconfirmed is not null AND ( ( split_part(a.email,'@', 2) = e.displayname) or + + ( split_part(split_part(a.email,'@', 2) , '.',2 ) ||'.'|| split_part(split_part(a.email,'@', 2) , '.',3) = e.displayname) + or + + ( split_part(split_part(a.email,'@', 2) , '.',3 ) ||'.'|| split_part(split_part(a.email,'@', 2) , '.',4) = e.displayname) + ) + + and NOT EXISTS (select 1 from explicitgroup_authenticateduser + where a.id = containedauthenticatedusers_id and e.id = explicitgroup_id); + RETURN NEW; +END; +$$; + +CREATE TRIGGER group_trigger AFTER UPDATE OF emailconfirmed ON public.authenticateduser FOR EACH ROW EXECUTE PROCEDURE public.groupmonitor(); + + diff --git a/distros/dataverse.no/init.d/affiliations/play_audio.sql b/distros/dataverse.no/init.d/affiliations/play_audio.sql new file mode 100644 index 0000000..f368f52 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql b/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql new file mode 100644 index 0000000..2750674 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_mp3.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/mp3'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql b/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql new file mode 100644 index 0000000..66f6fc8 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_mpeg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/mpeg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql b/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql new file mode 100644 index 0000000..3a7d4ed --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_ogg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/ogg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql b/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql new file mode 100644 index 0000000..0e02df2 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_audio_wav.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Audio' and contenttype='audio/wav'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video.sql b/distros/dataverse.no/init.d/affiliations/play_video.sql new file mode 
100644 index 0000000..61eb4c0 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql b/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql new file mode 100644 index 0000000..b14d4c6 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_mp4.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/mp4'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql b/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql new file mode 100644 index 0000000..1c997af --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_ogg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/ogg'; diff --git a/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql b/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql new file mode 100644 index 0000000..6e99d94 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/play_video_quicktime.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Play Video' and contenttype='video/quicktime'; diff --git a/distros/dataverse.no/init.d/affiliations/read_document.sql b/distros/dataverse.no/init.d/affiliations/read_document.sql new file mode 100644 index 0000000..d54d3f6 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_document.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document'; diff --git a/distros/dataverse.no/init.d/affiliations/read_text.sql b/distros/dataverse.no/init.d/affiliations/read_text.sql new file mode 100644 index 0000000..782e491 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_text.sql @@ -0,0 +1 @@ +select * from externaltool where displayname='Read Text' and contenttype='text/plain'; diff --git a/distros/dataverse.no/init.d/affiliations/read_text_plain.sql b/distros/dataverse.no/init.d/affiliations/read_text_plain.sql new file mode 100644 index 0000000..782e491 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/read_text_plain.sql @@ -0,0 +1 @@ +select * from externaltool where displayname='Read Text' and contenttype='text/plain'; diff --git a/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql new file mode 100644 index 0000000..7b86a61 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/recreate_trigger.sql @@ -0,0 +1,3 @@ +DROP TRIGGER IF EXISTS affiliation_trigger ON public.authenticateduser; +DROP TRIGGER IF EXISTS affiliation_trigger ON public.actionlogrecord; +CREATE TRIGGER affiliation_trigger AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql new file mode 100644 index 0000000..c58aa9e --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql @@ -0,0 +1,23 @@ +CREATE TABLE IF NOT EXISTS public.dvnoaffiliations ( + id bigint, + dvno_affiliation character varying(255) DEFAULT NULL::character varying, + dvno_group_name character varying(255) DEFAULT NULL::character varying, + dvno_email_level integer DEFAULT 2 +); + +ALTER TABLE public.dvnoaffiliations OWNER TO dataverse; + +CREATE OR REPLACE 
FUNCTION public.affiliationupdate() RETURNS trigger + LANGUAGE plpgsql + AS $$ + +BEGIN +IF NEW.actionsubtype='login' THEN + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); + update authenticateduser set affiliation=dvno_affiliation from dvnoaffiliations where dvno_group_name=substring(email, '\S+\W(\w+\W+\w+\W+\w+)') and substring(email, '\S+\W(\w+\W+\w+\W+\w+)') in (select dvno_group_name from dvnoaffiliations) and useridentifier in (select regexp_replace(useridentifier, '@', '') from actionlogrecord where actionsubtype='login' order by starttime desc limit 1); +END IF; +RETURN NULL; +END; +$$; +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); +DELETE from setting where name=':Shoulder'; diff --git a/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 new file mode 100644 index 0000000..c1307ec --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/updatetrigger.sql.2 @@ -0,0 +1,2 @@ +DROP TRIGGER affiliation_trigger_actionlog on public.actionlogrecord; +CREATE TRIGGER affiliation_trigger_actionlog AFTER INSERT ON public.actionlogrecord FOR EACH ROW EXECUTE PROCEDURE public.affiliationupdate(); diff --git a/distros/dataverse.no/init.d/affiliations/view_annotation.sql b/distros/dataverse.no/init.d/affiliations/view_annotation.sql new file mode 100644 index 0000000..97d252e --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_annotation.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Annotations'; diff --git a/distros/dataverse.no/init.d/affiliations/view_csv.sql b/distros/dataverse.no/init.d/affiliations/view_csv.sql new file mode 100644 index 0000000..2cae4e5 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_csv.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/csv'; diff --git a/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql b/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql new file mode 100644 index 0000000..ca83677 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_csv_tabular.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/comma-separated-values'; diff --git a/distros/dataverse.no/init.d/affiliations/view_data.sql b/distros/dataverse.no/init.d/affiliations/view_data.sql new file mode 100644 index 0000000..8dc243a --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_data.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data'; diff --git a/distros/dataverse.no/init.d/affiliations/view_html.sql b/distros/dataverse.no/init.d/affiliations/view_html.sql new file mode 100644 index 0000000..beedf68 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_html.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Html'; diff --git a/distros/dataverse.no/init.d/affiliations/view_html_type.sql b/distros/dataverse.no/init.d/affiliations/view_html_type.sql new file mode 100644 index 0000000..288af2d --- 
/dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_html_type.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Html' and contentType='text/html'; diff --git a/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql b/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql new file mode 100644 index 0000000..1113a63 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_hypothesis.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Annotations' and contenttype='application/x-json-hypothesis'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image.sql b/distros/dataverse.no/init.d/affiliations/view_image.sql new file mode 100644 index 0000000..9a6d158 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_gif.sql b/distros/dataverse.no/init.d/affiliations/view_image_gif.sql new file mode 100644 index 0000000..6ed69fb --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_gif.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/gif'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql b/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql new file mode 100644 index 0000000..a139467 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_jpeg.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/jpeg'; diff --git a/distros/dataverse.no/init.d/affiliations/view_image_png.sql b/distros/dataverse.no/init.d/affiliations/view_image_png.sql new file mode 100644 index 0000000..f2202a5 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_image_png.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Image' and contenttype='image/png'; diff --git a/distros/dataverse.no/init.d/affiliations/view_pdf.sql b/distros/dataverse.no/init.d/affiliations/view_pdf.sql new file mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_pdf.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document' and contenttype='application/pdf'; diff --git a/distros/dataverse.no/init.d/affiliations/view_r.sql b/distros/dataverse.no/init.d/affiliations/view_r.sql new file mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_r.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document' and contenttype='application/pdf'; diff --git a/distros/dataverse.no/init.d/affiliations/view_r_file.sql b/distros/dataverse.no/init.d/affiliations/view_r_file.sql new file mode 100644 index 0000000..2e45cb0 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_r_file.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View R file' and contenttype='type/x-r-syntax'; diff --git a/distros/dataverse.no/init.d/affiliations/view_stata.sql b/distros/dataverse.no/init.d/affiliations/view_stata.sql new file mode 100644 index 0000000..fb4dd1f --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_stata.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Read Document' and contenttype='application/pdf'; diff --git 
a/distros/dataverse.no/init.d/affiliations/view_tsv.sql b/distros/dataverse.no/init.d/affiliations/view_tsv.sql new file mode 100644 index 0000000..6e5a064 --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/view_tsv.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='View Data' and contenttype='text/tab-separated-values'; diff --git a/distros/dataverse.no/init.d/affiliations/webloader_check.sql b/distros/dataverse.no/init.d/affiliations/webloader_check.sql new file mode 100644 index 0000000..5c5ef0a --- /dev/null +++ b/distros/dataverse.no/init.d/affiliations/webloader_check.sql @@ -0,0 +1 @@ +select displayname from externaltool where displayname='Dataverse WebLoader'; diff --git a/distros/dataverse.no/init.d/cors.json b/distros/dataverse.no/init.d/cors.json new file mode 100644 index 0000000..3bd9c03 --- /dev/null +++ b/distros/dataverse.no/init.d/cors.json @@ -0,0 +1,10 @@ +{ + "CORSRules": [ + { + "AllowedOrigins": ["*"], + "AllowedHeaders": ["*"], + "AllowedMethods": ["PUT", "GET"], + "ExposeHeaders": ["ETag"] + } + ] +} diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh new file mode 100644 index 0000000..4cfd6fc --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sh @@ -0,0 +1,7 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +pg_dump -U dataverse dataverse -h postgres -t actionlogrecord > ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump +gzip --force ${DATA_DIR}/actionlogrecord$(date +'%Y%m%d').dump +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/actionlog/actionlogrecord.sql + + diff --git a/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql new file mode 100644 index 0000000..9a90101 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/actionlog/actionlogrecord.sql @@ -0,0 +1,2 @@ +DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days'; + diff --git a/distros/dataverse.no/init.d/cronjob/backup-fuction.sql b/distros/dataverse.no/init.d/cronjob/backup-fuction.sql new file mode 100644 index 0000000..6500497 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/backup-fuction.sql @@ -0,0 +1,50 @@ +drop function get_identifier; +create function get_identifier(thisid int) +returns text +language plpgsql +as +$$ +declare + authoritystring TEXT; + v_sql text; +begin + v_sql = 'select identifier from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc'; + execute v_sql into authoritystring; + return authoritystring; +end; +$$; + +drop function get_ownership; +create function get_ownership(thisid int) +returns text +language plpgsql +as +$$ +declare + authoritystring TEXT; + v_sql text; +begin + v_sql = 'select concat_ws(authority, identifier) from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc'; + execute v_sql into authoritystring; + return authoritystring; +end; +$$; + + + +drop function get_authority; +create function get_authority(thisid int) +returns text +language plpgsql +as +$$ +declare + authoritystring TEXT; + v_sql text; +begin + v_sql = 'select authority from dvobject where id in (select owner_id from dvobject where id=' || thisid || ' order by id desc) order by id desc'; + execute v_sql into authoritystring; + return authoritystring; +end; 
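+-- (Editorial sketch: these helper functions are consumed by the cronjob queries below, e.g. backupfiles.sql
+--  and checkETagByFiles.sql, roughly as
+--    select CONCAT(get_authority(id), '/', get_identifier(id), '/', storageidentifier)
+--    from dvobject where dtype='DataFile' and storageidentifier like '%S3:%';
+--  to rebuild the authority/identifier/file path of each datafile. This standalone query is illustrative only.)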
+$$; + diff --git a/distros/dataverse.no/init.d/cronjob/backupData.sh b/distros/dataverse.no/init.d/cronjob/backupData.sh new file mode 100755 index 0000000..6dacf6f --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/backupData.sh @@ -0,0 +1,40 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +cp -r /secrets/aws-cli/.aws ~ + +# copy Database to blob and s3 +#sh -c 'echo "deb https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' +#wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - +#apt update +#apt install postgresql-client-15 -y +#pg_dump -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} | gzip > /mnt/dataverse.dump.gz + +dumpName="dataverse.`date +%Y%m%d_%H%M%z`.dump.gz" + +if [ -d "/data/databaseDumps/" ]; then + cp /mnt/dataverse.dump.gz /data/databaseDumps/${dumpName} + echo "copied" ${dumpName} +fi + +aws s3 --endpoint https://$aws_endpoint cp /mnt/dataverse.dump.gz s3://$aws_bucket_name/databaseDumps/${dumpName} + +# backup files +#file=10.21337/OZ4JBV/1869225dfbd-4edecc03da9e +files=`psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2]}' | sed "s/S3:\/\/$aws_bucket_name://"` + +for file in $files + do + if [ -f /data/$file ]; then + echo "$file already copied" + else + aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file + echo "doneit" + fi + + done +#echo $files + +#rm -rf ~/.aws + +#cp -r /secrets/aws-cli/.aws ~ +#aws s3 --endpoint https://$aws_endpoint cp s3://$aws_bucket_name/$file /data/$file diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sh b/distros/dataverse.no/init.d/cronjob/backupfiles.sh new file mode 100644 index 0000000..f9fa557 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sh @@ -0,0 +1,4 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/cronjob/backupfiles.sql | grep S3 +#select storageidentifier from dvobject where modificationtime>='2023-02-02'; diff --git a/distros/dataverse.no/init.d/cronjob/backupfiles.sql b/distros/dataverse.no/init.d/cronjob/backupfiles.sql new file mode 100644 index 0000000..aa963a8 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/backupfiles.sql @@ -0,0 +1,12 @@ +--select storageidentifier from dvobject where modificationtime>='2022-12-05'; + + +--select storageidentifier, CONCAT(authority, '/',split_part(identifier, '/', 1) , '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '1 day'); + + +select storageidentifier, CONCAT( get_authority(id), '/', get_identifier(id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')) from dvobject where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day'); + + + +--select split_part(identifier, '/', 1) from dvobject where storageidentifier like '%S3:%' and identifier like '%/%' limit 10; +--select storageidentifier, CONCAT(authority, '/', identifier, '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), id from dvobject where storageidentifier like '%S3:%' limit 10; diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh 
b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh new file mode 100755 index 0000000..114b6c4 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +#BASEURL="https://....blob.core.windows.net/data1" +#KEYWINDOWSBLOB="" +FILEPATH="/data/dataverse-files/" + +#BASEURL="$1" +#KEYWINDOWSBLOB="$2" + +export PGPASSWORD=`cat /secrets/db/password` +cp -r /secrets/aws-cli/.aws ~ + +# +LogDir="/opt/payara/appserver/glassfish/domains/domain1/logs/" +if [ ! -d "${LogDir}" ]; then + LogDir="/var/log/" +fi + +if [ ! -d "/mnt/" ]; then + mkdir -p "/mnt/" +fi + +LogFile="${LogDir}checkETag_`date +%Y%m%d_%H%M%z`.log" +if [ ! -f "/mnt/checkETagByFiles.txt" ]; then + echo "`date +%Y%m%d_%H%M%z`: Start psql" > ${LogFile} + psql -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} -f ${INIT_SCRIPTS_FOLDER}/cronjob/checkETagByFiles.sql | grep S3 | awk '{split($0,a,"|"); print a[2] a[3]}' | sed "s/S3:\/\/$aws_bucket_name://" > /mnt/checkETagByFiles.txt + echo "`date +%Y%m%d_%H%M%z`: END psql" >> ${LogFile} +fi + +#while read p; do +while true; do + + if [ -f "/mnt/checkETagByFiles.txt" ]; then + line=$(head -n 1 /mnt/checkETagByFiles.txt) + + IFS=' ' read -a arrayData <<< "$line" + + #s3ETag=$(aws s3api --endpoint https://$aws_endpoint head-object --bucket $aws_bucket_name --key ${arrayData[0]} 2> /dev/null | jq .ETag | sed 's/\"//g' | sed 's/\\//g') + + #curl -s "https://....blob.core.windows.net/data1?sp=r&st=2024-04-15T10:25:37Z&se=2024-04-15T18:25:37Z&spr=https&sv=2022-11-02&sr=c&sig=" -I -q | grep "Content-MD5:" | awk '{ print $2 }' | base64 -di | xxd -p + arrayData[0]=$(echo ${arrayData[0]} | sed -e 's/S3\:\/\/2002-yellow-dataverseno\://g') + md5BlobBase64=$(curl -s "${BASEURL}${FILEPATH}${arrayData[0]}${KEYWINDOWSBLOB}" -I -q | grep "Content-MD5: " | awk '{ print $2 }' | base64 -di) + + if [ $? -eq 0 ]; then + md5Blob=$(echo -n "$md5BlobBase64" | xxd -p) + + #if [ -z "${s3ETag}" ]; then + if [ -z "${md5BlobBase64}" ]; then + echo "does not exist in the s3 storage: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile} + else + + #if [ "${s3ETag}" != "${arrayData[1]}" ]; then + if [ "${md5Blob}" != "${arrayData[1]}" ]; then + echo "is not equal: ${arrayData[0]} -- ${arrayData[1]}" >> ${LogFile} + fi + fi + fi + + #tail -n +2 "/mnt/checkETagByFiles.txt" > "/mnt/checkETagByFiles.txt.tmp" + sed '1d' "/mnt/checkETagByFiles.txt" > "/mnt/checkETagByFiles.txt.tmp" + mv "/mnt/checkETagByFiles.txt.tmp" "/mnt/checkETagByFiles.txt" + + if [ ! 
-s "/mnt/checkETagByFiles.txt" ]; then + rm /mnt/checkETagByFiles.txt + exit 0 + fi + + sleep 1s + fi +done + +exit 0 diff --git a/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql new file mode 100644 index 0000000..e3a1ce3 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/checkETagByFiles.sql @@ -0,0 +1,3 @@ + +--select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', REPLACE(storageidentifier, 'S3://2002-green-dataversenotest1:','')), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and modificationtime>=(current_date - INTERVAL '2 day'); +select storageidentifier, CONCAT( get_authority(dvobject.id), '/', get_identifier(dvobject.id), '/', storageidentifier ), datafile.checksumvalue from dvobject INNER join datafile on dvobject.id=datafile.id where storageidentifier like '%S3:%' and dtype='DataFile' and publicationdate IS NOT NULL and protocol IS NOT NULL and identifier IS NOT NULL; \ No newline at end of file diff --git a/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh new file mode 100755 index 0000000..5babb41 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +# +# + + +#docker exec --user postgres postgres pg_dump -U dataverse dataverse > /mnt/dataverse.dump +#gzip -f /mnt/dataverse.dump + +# dump databases +docker exec --user postgres postgres pg_dump -U dataverse dataverse | gzip > /mnt/dataverse.dump.gz +#docker exec --user postgres postgres pg_dump -h ${DATAVERSE_DB_HOST} -U ${DATAVERSE_DB_USER} ${POSTGRES_DATABASE} | gzip > /mnt/dataverse.dump.gz + +#cp /mnt/dataverse.dump.gz /mntblob/data/databaseDumps/dataverse.`date +%Y%m%d_%H%M%z`.dump.gz +rsync -arvP --rsh=ssh /mnt/dataverse.dump.gz DVmtr-cmp01:/tmp/dataverse.dump.gz + +docker exec dataverse /opt/payara/init.d/cronjob/backupData.sh + +rm /mnt/dataverse.dump.gz \ No newline at end of file diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification.json b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json new file mode 100644 index 0000000..d5067d1 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification.json @@ -0,0 +1,10 @@ +{ + "dismissibleByUser": "false", + "messageTexts": [ + { + "lang": "en", + "message": "Please note that at 08:00 CE(S)T every Thursday morning, DataverseNO will not be available for approx. 5 minutes due to regular maintenance. Please avoid upload data or edit your dataset in other ways during this short period. Thank you! For questions, please contact the DataverseNO Team at support@dataverse.no." + } + + ] +}  diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh b/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh new file mode 100755 index 0000000..dadfc3d --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification_off.sh @@ -0,0 +1,5 @@ +#! 
/bin/bash + +curl -X DELETE http://localhost:8080/api/admin/bannerMessage/$(curl -s -X GET http://localhost:8080/api/admin/bannerMessage | jq -r 'first(.data[]|select(.displayValue | contains("due to regular maintenance")).id)') + +# 30 06 * * 4 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_off.sh" diff --git a/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh new file mode 100755 index 0000000..f745558 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/maintenance_notification_on.sh @@ -0,0 +1,4 @@ +#! /bin/bash +curl -H "Content-type:application/json" -X POST http://localhost:8080/api/admin/bannerMessage --upload-file ${INIT_SCRIPTS_FOLDER}/cronjob/maintenance_notification.json + +# 0 16 * * 3 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_on.sh" diff --git a/distros/dataverse.no/init.d/cronjob/makedatacount.sh b/distros/dataverse.no/init.d/cronjob/makedatacount.sh new file mode 100644 index 0000000..50f5158 --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/makedatacount.sh @@ -0,0 +1,5 @@ +#!/bin/bash +#export YEAR_MONTH=$(date '+%Y-%m') +export YEAR_MONTH=2022-09 +cd /opt/payara/counter-processor/counter-processor-${COUNTERPROSVERSION} +python3.8 main.py diff --git a/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json b/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json new file mode 100644 index 0000000..1fe7cce --- /dev/null +++ b/distros/dataverse.no/init.d/cronjob/onetime_maintenace.json @@ -0,0 +1,10 @@ +{ + "dismissibleByUser": "false", + "messageTexts": [ + { + "lang": "en", + "message": "Due to maintenance, DataverseNO will be unavailable between Thursday September 21 at 16:00 CEST until Friday September 22 at 08:00 CEST. Apologies for the short notice and for the inconvenience. If you have any questions or comments, please contact the DataverseNO team at support@dataverse.no." 
+ } + + ] +}  diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh new file mode 100644 index 0000000..b71ccac --- /dev/null +++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning.sh @@ -0,0 +1,3 @@ +#!/bin/bash +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f /tmp/dvno_geolocation_cleaning20240320.sql diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql new file mode 100644 index 0000000..f70d735 --- /dev/null +++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20231129.sql @@ -0,0 +1,2 @@ +update datasetfieldvalue set value='18.57' where id=204418; +update datasetfieldvalue set value='69.41' where id=204435; diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql new file mode 100644 index 0000000..c0bc9bd --- /dev/null +++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning20240301.sql @@ -0,0 +1,4 @@ +update datasetfieldvalue set value='11.11621830' where id=206935; +update datasetfieldvalue set value='11.51643553' where id=206932; +update datasetfieldvalue set value='61.57060935' where id=206933; +update datasetfieldvalue set value='61.15827431' where id=206938; diff --git a/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql new file mode 100644 index 0000000..ede4eac --- /dev/null +++ b/distros/dataverse.no/migration/_dvno_geolocation_cleaning_v5_13.sql @@ -0,0 +1,3293 @@ +update datasetfieldvalue set value='71.2' where id=6831; +update datasetfieldvalue set value='31.2' where id=6832; +update datasetfieldvalue set value='4.5' where id=6833; +update datasetfieldvalue set value='57.9' where id=6835; +update datasetfieldvalue set value='31.2' where id=6878; +update datasetfieldvalue set value='57.9' where id=6888; +update datasetfieldvalue set value='71.2' where id=6889; +update datasetfieldvalue set value='4.5' where id=6892; +update datasetfieldvalue set value='71.1' where id=15257; +update datasetfieldvalue set value='60.8' where id=15258; +update datasetfieldvalue set value='31.1' where id=15259; +update datasetfieldvalue set value='58.0' where id=15260; +update datasetfieldvalue set value='57.76' where id=17053; +update datasetfieldvalue set value='71.38' where id=17059; +update datasetfieldvalue set value='4.09' where id=17064; +update datasetfieldvalue set value='31.76' where id=17073; +update datasetfieldvalue set value='19.216667' where id=17509; +update datasetfieldvalue set value='19.216667' where id=17514; +update datasetfieldvalue set value='69.583333' where id=17531; +update datasetfieldvalue set value='69.583333' where id=17543; +update datasetfieldvalue set value='19.216667' where id=17689; +update datasetfieldvalue set value='19.216667' where id=17690; +update datasetfieldvalue set value='69.583333' where id=17703; +update datasetfieldvalue set value='69.583333' where id=17705; +update datasetfieldvalue set value='31.1' where id=17841; +update datasetfieldvalue set value='58.0 ' where id=17846; +update datasetfieldvalue set value='71.1' where id=17848; +update datasetfieldvalue set value='60.8 ' where id=17852; +update datasetfieldvalue set value='71.1' where id=17871; +update datasetfieldvalue set value='60.8 ' where id=17874; +update 
datasetfieldvalue set value='58.0 ' where id=17889; +update datasetfieldvalue set value='31.1' where id=17891; +update datasetfieldvalue set value='31.05' where id=17895; +update datasetfieldvalue set value='28.17' where id=17896; +update datasetfieldvalue set value='69.0' where id=17897; +update datasetfieldvalue set value='70.90' where id=17898; +update datasetfieldvalue set value='69.583333' where id=17930; +update datasetfieldvalue set value='19.216667' where id=17939; +update datasetfieldvalue set value='19.216667' where id=17944; +update datasetfieldvalue set value='69.583333' where id=17949; +update datasetfieldvalue set value='69.583333' where id=17980; +update datasetfieldvalue set value='19.216667' where id=17991; +update datasetfieldvalue set value='19.216667' where id=18000; +update datasetfieldvalue set value='69.583333' where id=18008; +update datasetfieldvalue set value='69.583333' where id=18015; +update datasetfieldvalue set value='19.216667' where id=18025; +update datasetfieldvalue set value='19.216667' where id=18038; +update datasetfieldvalue set value='69.583333' where id=18055; +update datasetfieldvalue set value='69.583333' where id=18058; +update datasetfieldvalue set value='19.216667' where id=18076; +update datasetfieldvalue set value='19.216667' where id=18092; +update datasetfieldvalue set value='69.583333' where id=18097; +update datasetfieldvalue set value='69.583333' where id=18103; +update datasetfieldvalue set value='19.216667' where id=18104; +update datasetfieldvalue set value='69.583333' where id=18130; +update datasetfieldvalue set value='19.216667' where id=18134; +update datasetfieldvalue set value='69.583333' where id=18141; +update datasetfieldvalue set value='19.216667' where id=18152; +update datasetfieldvalue set value='19.216667' where id=18179; +update datasetfieldvalue set value='69.583333' where id=18181; +update datasetfieldvalue set value='69.583333' where id=18189; +update datasetfieldvalue set value='19.216667' where id=18197; +update datasetfieldvalue set value='69.583333' where id=18205; +update datasetfieldvalue set value='19.216667' where id=18206; +update datasetfieldvalue set value='19.216667' where id=18227; +update datasetfieldvalue set value='19.216667' where id=18233; +update datasetfieldvalue set value='69.583333' where id=18235; +update datasetfieldvalue set value='69.583333' where id=18247; +update datasetfieldvalue set value='69.583333' where id=18274; +update datasetfieldvalue set value='19.216667' where id=18275; +update datasetfieldvalue set value='69.583333' where id=18292; +update datasetfieldvalue set value='19.216667' where id=18304; +update datasetfieldvalue set value='69.583333' where id=18320; +update datasetfieldvalue set value='19.216667' where id=18324; +update datasetfieldvalue set value='69.583333' where id=18328; +update datasetfieldvalue set value='19.216667' where id=18345; +update datasetfieldvalue set value='19.216667' where id=18353; +update datasetfieldvalue set value='69.583333' where id=18360; +update datasetfieldvalue set value='19.216667' where id=18362; +update datasetfieldvalue set value='69.583333' where id=18365; +update datasetfieldvalue set value='69.583333' where id=18395; +update datasetfieldvalue set value='19.216667' where id=18405; +update datasetfieldvalue set value='69.583333' where id=18409; +update datasetfieldvalue set value='19.216667' where id=18424; +update datasetfieldvalue set value='69.583333' where id=18435; +update datasetfieldvalue set value='69.583333' where id=18462; 
+update datasetfieldvalue set value='19.216667' where id=18465; +update datasetfieldvalue set value='19.216667' where id=18470; +update datasetfieldvalue set value='19.216667' where id=18488; +update datasetfieldvalue set value='69.583333' where id=18502; +update datasetfieldvalue set value='69.583333' where id=18503; +update datasetfieldvalue set value='19.216667' where id=18508; +update datasetfieldvalue set value='69.583333' where id=18524; +update datasetfieldvalue set value='19.216667' where id=18530; +update datasetfieldvalue set value='69.583333' where id=18553; +update datasetfieldvalue set value='19.216667' where id=18559; +update datasetfieldvalue set value='19.216667' where id=18570; +update datasetfieldvalue set value='69.583333' where id=18581; +update datasetfieldvalue set value='19.216667' where id=18589; +update datasetfieldvalue set value='69.583333' where id=18597; +update datasetfieldvalue set value='19.216667' where id=18605; +update datasetfieldvalue set value='69.583333' where id=18609; +update datasetfieldvalue set value='69.583333' where id=18617; +update datasetfieldvalue set value='19.216667' where id=18619; +update datasetfieldvalue set value='19.216667' where id=18656; +update datasetfieldvalue set value='69.583333' where id=18663; +update datasetfieldvalue set value='69.583333' where id=18670; +update datasetfieldvalue set value='19.216667' where id=18684; +update datasetfieldvalue set value='19.216667' where id=18689; +update datasetfieldvalue set value='19.216667' where id=18698; +update datasetfieldvalue set value='69.583333' where id=18700; +update datasetfieldvalue set value='69.583333' where id=18726; +update datasetfieldvalue set value='19.216667' where id=18733; +update datasetfieldvalue set value='19.216667' where id=18749; +update datasetfieldvalue set value='69.583333' where id=18757; +update datasetfieldvalue set value='69.583333' where id=18760; +update datasetfieldvalue set value='19.216667' where id=18774; +update datasetfieldvalue set value='69.583333' where id=18776; +update datasetfieldvalue set value='69.583333' where id=18805; +update datasetfieldvalue set value='19.216667' where id=18806; +update datasetfieldvalue set value='69.583333' where id=18822; +update datasetfieldvalue set value='19.216667' where id=18848; +update datasetfieldvalue set value='19.216667' where id=18849; +update datasetfieldvalue set value='69.583333' where id=18850; +update datasetfieldvalue set value='19.216667' where id=18856; +update datasetfieldvalue set value='69.583333' where id=18857; +update datasetfieldvalue set value='69.583333' where id=18880; +update datasetfieldvalue set value='19.216667' where id=18889; +update datasetfieldvalue set value='69.583333' where id=18909; +update datasetfieldvalue set value='19.216667' where id=18911; +update datasetfieldvalue set value='69.583333' where id=18920; +update datasetfieldvalue set value='19.216667' where id=18937; +update datasetfieldvalue set value='69.583333' where id=18956; +update datasetfieldvalue set value='19.216667' where id=18963; +update datasetfieldvalue set value='19.216667' where id=18969; +update datasetfieldvalue set value='69.583333' where id=18979; +update datasetfieldvalue set value='69.583333' where id=18982; +update datasetfieldvalue set value='19.216667' where id=18996; +update datasetfieldvalue set value='69.583333' where id=19014; +update datasetfieldvalue set value='19.216667' where id=19017; +update datasetfieldvalue set value='19.216667' where id=19026; +update datasetfieldvalue set 
value='69.583333' where id=19044; +update datasetfieldvalue set value='69.583333' where id=19047; +update datasetfieldvalue set value='19.216667' where id=19052; +update datasetfieldvalue set value='19.216667' where id=19072; +update datasetfieldvalue set value='69.583333' where id=19083; +update datasetfieldvalue set value='69.583333' where id=19090; +update datasetfieldvalue set value='19.216667' where id=19098; +update datasetfieldvalue set value='69.583333' where id=19119; +update datasetfieldvalue set value='69.583333' where id=19141; +update datasetfieldvalue set value='19.216667' where id=19142; +update datasetfieldvalue set value='19.216667' where id=19146; +update datasetfieldvalue set value='19.216667' where id=19157; +update datasetfieldvalue set value='69.583333' where id=19172; +update datasetfieldvalue set value='69.583333' where id=19178; +update datasetfieldvalue set value='19.216667' where id=19180; +update datasetfieldvalue set value='69.583333' where id=19194; +update datasetfieldvalue set value='19.216667' where id=19196; +update datasetfieldvalue set value='69.583333' where id=19197; +update datasetfieldvalue set value='19.216667' where id=19204; +update datasetfieldvalue set value='19.216667' where id=19238; +update datasetfieldvalue set value='69.583333' where id=19246; +update datasetfieldvalue set value='19.216667' where id=19251; +update datasetfieldvalue set value='69.583333' where id=19271; +update datasetfieldvalue set value='19.216667' where id=19283; +update datasetfieldvalue set value='69.583333' where id=19289; +update datasetfieldvalue set value='19.216667' where id=19294; +update datasetfieldvalue set value='69.583333' where id=19303; +update datasetfieldvalue set value='69.583333' where id=19319; +update datasetfieldvalue set value='69.583333' where id=19348; +update datasetfieldvalue set value='19.216667' where id=19350; +update datasetfieldvalue set value='19.216667' where id=19354; +update datasetfieldvalue set value='19.216667' where id=19362; +update datasetfieldvalue set value='69.583333' where id=19368; +update datasetfieldvalue set value='69.583333' where id=19370; +update datasetfieldvalue set value='19.216667' where id=19393; +update datasetfieldvalue set value='69.583333' where id=19409; +update datasetfieldvalue set value='69.583333' where id=19413; +update datasetfieldvalue set value='19.216667' where id=19417; +update datasetfieldvalue set value='19.216667' where id=19442; +update datasetfieldvalue set value='19.216667' where id=19449; +update datasetfieldvalue set value='69.583333' where id=19452; +update datasetfieldvalue set value='69.583333' where id=19465; +update datasetfieldvalue set value='19.216667' where id=19478; +update datasetfieldvalue set value='19.216667' where id=19486; +update datasetfieldvalue set value='69.583333' where id=19498; +update datasetfieldvalue set value='19.216667' where id=19506; +update datasetfieldvalue set value='69.583333' where id=19508; +update datasetfieldvalue set value='69.583333' where id=19532; +update datasetfieldvalue set value='69.583333' where id=19538; +update datasetfieldvalue set value='19.216667' where id=19541; +update datasetfieldvalue set value='19.216667' where id=19561; +update datasetfieldvalue set value='19.216667' where id=19570; +update datasetfieldvalue set value='69.583333' where id=19594; +update datasetfieldvalue set value='69.583333' where id=19603; +update datasetfieldvalue set value='19.216667' where id=19611; +update datasetfieldvalue set value='69.583333' where id=19613; 
+update datasetfieldvalue set value='19.216667' where id=19616; +update datasetfieldvalue set value='69.583333' where id=19630; +update datasetfieldvalue set value='19.216667' where id=19653; +update datasetfieldvalue set value='69.583333' where id=19659; +update datasetfieldvalue set value='19.216667' where id=19674; +update datasetfieldvalue set value='19.216667' where id=19677; +update datasetfieldvalue set value='69.583333' where id=19684; +update datasetfieldvalue set value='69.583333' where id=19714; +update datasetfieldvalue set value='19.216667' where id=19733; +update datasetfieldvalue set value='19.216667' where id=19735; +update datasetfieldvalue set value='69.583333' where id=19736; +update datasetfieldvalue set value='69.583333' where id=19744; +update datasetfieldvalue set value='69.583333' where id=19768; +update datasetfieldvalue set value='19.216667' where id=19770; +update datasetfieldvalue set value='19.216667' where id=19777; +update datasetfieldvalue set value='69.583333' where id=19780; +update datasetfieldvalue set value='19.216667' where id=19786; +update datasetfieldvalue set value='69.583333' where id=19800; +update datasetfieldvalue set value='19.216667' where id=19809; +update datasetfieldvalue set value='69.583333' where id=19829; +update datasetfieldvalue set value='19.216667' where id=19830; +update datasetfieldvalue set value='19.216667' where id=19837; +update datasetfieldvalue set value='69.583333' where id=19863; +update datasetfieldvalue set value='69.583333' where id=19896; +update datasetfieldvalue set value='69.583333' where id=19909; +update datasetfieldvalue set value='19.216667' where id=19912; +update datasetfieldvalue set value='19.216667' where id=19927; +update datasetfieldvalue set value='19.216667' where id=19939; +update datasetfieldvalue set value='69.583333' where id=19940; +update datasetfieldvalue set value='19.216667' where id=19944; +update datasetfieldvalue set value='69.583333' where id=19947; +update datasetfieldvalue set value='19.216667' where id=19977; +update datasetfieldvalue set value='69.583333' where id=19982; +update datasetfieldvalue set value='19.216667' where id=20008; +update datasetfieldvalue set value='69.583333' where id=20010; +update datasetfieldvalue set value='69.583333' where id=20018; +update datasetfieldvalue set value='19.216667' where id=20021; +update datasetfieldvalue set value='69.583333' where id=20023; +update datasetfieldvalue set value='19.216667' where id=20034; +update datasetfieldvalue set value='69.583333' where id=20071; +update datasetfieldvalue set value='69.583333' where id=20080; +update datasetfieldvalue set value='19.216667' where id=20083; +update datasetfieldvalue set value='19.216667' where id=20097; +update datasetfieldvalue set value='69.583333' where id=20102; +update datasetfieldvalue set value='19.216667' where id=20122; +update datasetfieldvalue set value='19.216667' where id=20123; +update datasetfieldvalue set value='69.583333' where id=20135; +update datasetfieldvalue set value='69.583333' where id=20152; +update datasetfieldvalue set value='19.216667' where id=20165; +update datasetfieldvalue set value='69.583333' where id=20166; +update datasetfieldvalue set value='19.216667' where id=20183; +update datasetfieldvalue set value='69.583333' where id=20205; +update datasetfieldvalue set value='19.216667' where id=20208; +update datasetfieldvalue set value='19.216667' where id=20209; +update datasetfieldvalue set value='69.583333' where id=20226; +update datasetfieldvalue set 
value='19.216667' where id=20242; +update datasetfieldvalue set value='69.583333' where id=20243; +update datasetfieldvalue set value='69.583333' where id=20259; +update datasetfieldvalue set value='19.216667' where id=20269; +update datasetfieldvalue set value='19.216667' where id=20276; +update datasetfieldvalue set value='19.216667' where id=20299; +update datasetfieldvalue set value='69.583333' where id=20308; +update datasetfieldvalue set value='69.583333' where id=20310; +update datasetfieldvalue set value='69.583333' where id=20315; +update datasetfieldvalue set value='19.216667' where id=20316; +update datasetfieldvalue set value='69.583333' where id=20330; +update datasetfieldvalue set value='19.216667' where id=20344; +update datasetfieldvalue set value='69.583333' where id=20358; +update datasetfieldvalue set value='19.216667' where id=20361; +update datasetfieldvalue set value='69.583333' where id=20379; +update datasetfieldvalue set value='19.216667' where id=20394; +update datasetfieldvalue set value='19.216667' where id=20399; +update datasetfieldvalue set value='69.583333' where id=20406; +update datasetfieldvalue set value='69.583333' where id=20416; +update datasetfieldvalue set value='19.216667' where id=20417; +update datasetfieldvalue set value='19.216667' where id=20443; +update datasetfieldvalue set value='69.583333' where id=20454; +update datasetfieldvalue set value='69.583333' where id=20458; +update datasetfieldvalue set value='19.216667' where id=20471; +update datasetfieldvalue set value='69.583333' where id=20488; +update datasetfieldvalue set value='19.216667' where id=20492; +update datasetfieldvalue set value='19.216667' where id=20503; +update datasetfieldvalue set value='69.583333' where id=20511; +update datasetfieldvalue set value='69.583333' where id=20523; +update datasetfieldvalue set value='19.216667' where id=20528; +update datasetfieldvalue set value='69.583333' where id=20552; +update datasetfieldvalue set value='19.216667' where id=20559; +update datasetfieldvalue set value='69.583333' where id=20567; +update datasetfieldvalue set value='69.583333' where id=20569; +update datasetfieldvalue set value='19.216667' where id=20570; +update datasetfieldvalue set value='19.216667' where id=20575; +update datasetfieldvalue set value='69.583333' where id=20608; +update datasetfieldvalue set value='19.216667' where id=20617; +update datasetfieldvalue set value='69.583333' where id=20637; +update datasetfieldvalue set value='19.216667' where id=20639; +update datasetfieldvalue set value='69.583333' where id=20650; +update datasetfieldvalue set value='19.216667' where id=20651; +update datasetfieldvalue set value='19.216667' where id=20669; +update datasetfieldvalue set value='69.583333' where id=20673; +update datasetfieldvalue set value='19.216667' where id=20694; +update datasetfieldvalue set value='69.583333' where id=20700; +update datasetfieldvalue set value='19.216667' where id=20718; +update datasetfieldvalue set value='69.583333' where id=20728; +update datasetfieldvalue set value='19.216667' where id=20737; +update datasetfieldvalue set value='19.216667' where id=20749; +update datasetfieldvalue set value='69.583333' where id=20768; +update datasetfieldvalue set value='69.583333' where id=20769; +update datasetfieldvalue set value='19.216667' where id=20784; +update datasetfieldvalue set value='69.583333' where id=20799; +update datasetfieldvalue set value='19.216667' where id=20800; +update datasetfieldvalue set value='69.583333' where id=20803; 
+update datasetfieldvalue set value='69.583333' where id=20816; +update datasetfieldvalue set value='19.216667' where id=20844; +update datasetfieldvalue set value='69.583333' where id=20848; +update datasetfieldvalue set value='19.216667' where id=20853; +update datasetfieldvalue set value='19.216667' where id=20859; +update datasetfieldvalue set value='69.583333' where id=20886; +update datasetfieldvalue set value='69.583333' where id=20887; +update datasetfieldvalue set value='19.216667' where id=20888; +update datasetfieldvalue set value='19.216667' where id=20912; +update datasetfieldvalue set value='19.216667' where id=20913; +update datasetfieldvalue set value='69.583333' where id=20926; +update datasetfieldvalue set value='69.583333' where id=20933; +update datasetfieldvalue set value='19.216667' where id=20944; +update datasetfieldvalue set value='19.216667' where id=20955; +update datasetfieldvalue set value='69.583333' where id=20958; +update datasetfieldvalue set value='69.583333' where id=20975; +update datasetfieldvalue set value='19.216667' where id=20991; +update datasetfieldvalue set value='69.583333' where id=20993; +update datasetfieldvalue set value='69.583333' where id=21005; +update datasetfieldvalue set value='19.216667' where id=21022; +update datasetfieldvalue set value='69.583333' where id=21042; +update datasetfieldvalue set value='19.216667' where id=21046; +update datasetfieldvalue set value='69.583333' where id=21055; +update datasetfieldvalue set value='19.216667' where id=21056; +update datasetfieldvalue set value='69.583333' where id=21068; +update datasetfieldvalue set value='69.583333' where id=21069; +update datasetfieldvalue set value='19.216667' where id=21099; +update datasetfieldvalue set value='19.216667' where id=21102; +update datasetfieldvalue set value='69.583333' where id=21110; +update datasetfieldvalue set value='19.216667' where id=21124; +update datasetfieldvalue set value='69.583333' where id=21131; +update datasetfieldvalue set value='19.216667' where id=21150; +update datasetfieldvalue set value='69.583333' where id=21154; +update datasetfieldvalue set value='19.216667' where id=21179; +update datasetfieldvalue set value='69.583333' where id=21188; +update datasetfieldvalue set value='19.216667' where id=21193; +update datasetfieldvalue set value='69.583333' where id=21196; +update datasetfieldvalue set value='19.216667' where id=21198; +update datasetfieldvalue set value='69.583333' where id=21218; +update datasetfieldvalue set value='19.216667' where id=21232; +update datasetfieldvalue set value='69.583333' where id=21251; +update datasetfieldvalue set value='19.216667' where id=21261; +update datasetfieldvalue set value='69.583333' where id=21274; +update datasetfieldvalue set value='19.216667' where id=21277; +update datasetfieldvalue set value='69.583333' where id=21284; +update datasetfieldvalue set value='69.583333' where id=21301; +update datasetfieldvalue set value='19.216667' where id=21308; +update datasetfieldvalue set value='19.216667' where id=21318; +update datasetfieldvalue set value='19.216667' where id=21332; +update datasetfieldvalue set value='19.216667' where id=21354; +update datasetfieldvalue set value='69.583333' where id=21357; +update datasetfieldvalue set value='69.583333' where id=21361; +update datasetfieldvalue set value='69.583333' where id=21390; +update datasetfieldvalue set value='69.583333' where id=21395; +update datasetfieldvalue set value='19.216667' where id=21399; +update datasetfieldvalue set 
value='19.216667' where id=21402; +update datasetfieldvalue set value='19.216667' where id=21424; +update datasetfieldvalue set value='69.583333' where id=21434; +update datasetfieldvalue set value='69.583333' where id=21444; +update datasetfieldvalue set value='19.216667' where id=21445; +update datasetfieldvalue set value='19.216667' where id=21446; +update datasetfieldvalue set value='69.583333' where id=21453; +update datasetfieldvalue set value='69.583333' where id=21467; +update datasetfieldvalue set value='19.216667' where id=21473; +update datasetfieldvalue set value='19.216667' where id=21491; +update datasetfieldvalue set value='69.583333' where id=21499; +update datasetfieldvalue set value='69.583333' where id=21513; +update datasetfieldvalue set value='19.216667' where id=21514; +update datasetfieldvalue set value='69.583333' where id=21546; +update datasetfieldvalue set value='19.216667' where id=21550; +update datasetfieldvalue set value='19.216667' where id=21557; +update datasetfieldvalue set value='69.583333' where id=21567; +update datasetfieldvalue set value='19.216667' where id=21590; +update datasetfieldvalue set value='69.583333' where id=21606; +update datasetfieldvalue set value='19.216667' where id=21610; +update datasetfieldvalue set value='69.583333' where id=21611; +update datasetfieldvalue set value=69.0 where id=21626; +update datasetfieldvalue set value=70.90 where id=21636; +update datasetfieldvalue set value=31.05 where id=21644; +update datasetfieldvalue set value=28.17 where id=21651; +update datasetfieldvalue set value='69.583333' where id=21954; +update datasetfieldvalue set value='69.583333' where id=21962; +update datasetfieldvalue set value='19.216667' where id=21969; +update datasetfieldvalue set value='19.216667' where id=21971; +update datasetfieldvalue set value='31.76' where id=22305; +update datasetfieldvalue set value='57.76' where id=22308; +update datasetfieldvalue set value='4.09' where id=22333; +update datasetfieldvalue set value='71.38' where id=22343; +update datasetfieldvalue set value='19.216667' where id=22364; +update datasetfieldvalue set value='69.583333' where id=22368; +update datasetfieldvalue set value='69.583333' where id=22378; +update datasetfieldvalue set value='19.216667' where id=22385; +update datasetfieldvalue set value='19.216667' where id=22465; +update datasetfieldvalue set value='69.583333' where id=22469; +update datasetfieldvalue set value='69.583333' where id=22487; +update datasetfieldvalue set value='19.216667' where id=22489; +update datasetfieldvalue set value='9.333333' where id=22494; +update datasetfieldvalue set value='78.666667' where id=22495; +update datasetfieldvalue set value='9.666667' where id=22496; +update datasetfieldvalue set value='78.5' where id=22497; +update datasetfieldvalue set value='16.148889' where id=22621; +update datasetfieldvalue set value='76.147778' where id=22622; +update datasetfieldvalue set value='76.084167' where id=22623; +update datasetfieldvalue set value='15.824444' where id=22625; +update datasetfieldvalue set value='69.583333' where id=22930; +update datasetfieldvalue set value='69.583333' where id=22947; +update datasetfieldvalue set value='19.216667' where id=22950; +update datasetfieldvalue set value='19.216667' where id=22956; +update datasetfieldvalue set value='19.216667' where id=22967; +update datasetfieldvalue set value='19.216667' where id=22969; +update datasetfieldvalue set value='69.583333' where id=22979; +update datasetfieldvalue set value='69.583333' 
where id=23007; +update datasetfieldvalue set value='19.216667' where id=23021; +update datasetfieldvalue set value='69.583333' where id=23027; +update datasetfieldvalue set value='69.583333' where id=23031; +update datasetfieldvalue set value='19.216667' where id=23049; +update datasetfieldvalue set value='19.216667' where id=23057; +update datasetfieldvalue set value='19.216667' where id=23060; +update datasetfieldvalue set value='69.583333' where id=23078; +update datasetfieldvalue set value='69.583333' where id=23088; +update datasetfieldvalue set value='69.583333' where id=23488; +update datasetfieldvalue set value='19.216667' where id=23498; +update datasetfieldvalue set value='19.216667' where id=23511; +update datasetfieldvalue set value='69.583333' where id=23513; +update datasetfieldvalue set value='19.216667' where id=23579; +update datasetfieldvalue set value='19.216667' where id=23592; +update datasetfieldvalue set value='69.583333' where id=23593; +update datasetfieldvalue set value='69.583333' where id=23597; +update datasetfieldvalue set value='69.583333' where id=23603; +update datasetfieldvalue set value='19.216667' where id=23634; +update datasetfieldvalue set value='69.583333' where id=23635; +update datasetfieldvalue set value='19.216667' where id=23639; +update datasetfieldvalue set value='19.216667' where id=23652; +update datasetfieldvalue set value='69.583333' where id=23670; +update datasetfieldvalue set value='19.216667' where id=23675; +update datasetfieldvalue set value='69.583333' where id=23676; +update datasetfieldvalue set value='19.216667' where id=23695; +update datasetfieldvalue set value='19.216667' where id=23714; +update datasetfieldvalue set value='69.583333' where id=23716; +update datasetfieldvalue set value='69.583333' where id=23724; +update datasetfieldvalue set value='19.216667' where id=23732; +update datasetfieldvalue set value='69.583333' where id=23747; +update datasetfieldvalue set value='19.216667' where id=23751; +update datasetfieldvalue set value='69.583333' where id=23763; +update datasetfieldvalue set value='69.583333' where id=23768; +update datasetfieldvalue set value='69.583333' where id=23786; +update datasetfieldvalue set value='19.216667' where id=23787; +update datasetfieldvalue set value='19.216667' where id=23795; +update datasetfieldvalue set value='78.666667' where id=23900; +update datasetfieldvalue set value='9.666667' where id=23936; +update datasetfieldvalue set value='78.5' where id=23940; +update datasetfieldvalue set value='9.333333' where id=23944; +update datasetfieldvalue set value='19.216667' where id=23958; +update datasetfieldvalue set value='69.583333' where id=23963; +update datasetfieldvalue set value='19.216667' where id=23965; +update datasetfieldvalue set value='69.583333' where id=23972; +update datasetfieldvalue set value='19.216667' where id=24009; +update datasetfieldvalue set value='69.583333' where id=24015; +update datasetfieldvalue set value='69.583333' where id=24016; +update datasetfieldvalue set value='19.216667' where id=24035; +update datasetfieldvalue set value='69.583333' where id=24037; +update datasetfieldvalue set value='19.216667' where id=24048; +update datasetfieldvalue set value='69.583333' where id=24064; +update datasetfieldvalue set value='19.216667' where id=24065; +update datasetfieldvalue set value='69.583333' where id=24090; +update datasetfieldvalue set value='19.216667' where id=24092; +update datasetfieldvalue set value='69.583333' where id=24098; +update datasetfieldvalue 
set value='19.216667' where id=24105; +update datasetfieldvalue set value='69.583333' where id=24123; +update datasetfieldvalue set value='69.583333' where id=24127; +update datasetfieldvalue set value='19.216667' where id=24129; +update datasetfieldvalue set value='19.216667' where id=24144; +update datasetfieldvalue set value='19.216667' where id=24168; +update datasetfieldvalue set value='69.583333' where id=24177; +update datasetfieldvalue set value='69.583333' where id=24198; +update datasetfieldvalue set value='19.216667' where id=24202; +update datasetfieldvalue set value='19.216667' where id=24215; +update datasetfieldvalue set value='19.216667' where id=24226; +update datasetfieldvalue set value='69.583333' where id=24241; +update datasetfieldvalue set value='69.583333' where id=24243; +update datasetfieldvalue set value='69.583333' where id=24252; +update datasetfieldvalue set value='19.216667' where id=24253; +update datasetfieldvalue set value='19.216667' where id=24265; +update datasetfieldvalue set value='69.583333' where id=24283; +update datasetfieldvalue set value='69.583333' where id=24291; +update datasetfieldvalue set value='19.216667' where id=24294; +update datasetfieldvalue set value='69.583333' where id=24303; +update datasetfieldvalue set value='19.216667' where id=24329; +update datasetfieldvalue set value='69.583333' where id=24331; +update datasetfieldvalue set value='19.216667' where id=24351; +update datasetfieldvalue set value='69.583333' where id=24352; +update datasetfieldvalue set value='19.216667' where id=24360; +update datasetfieldvalue set value='19.216667' where id=24377; +update datasetfieldvalue set value='69.583333' where id=24393; +update datasetfieldvalue set value='69.583333' where id=24404; +update datasetfieldvalue set value='19.216667' where id=24406; +update datasetfieldvalue set value='69.583333' where id=24415; +update datasetfieldvalue set value='69.583333' where id=24426; +update datasetfieldvalue set value='19.216667' where id=24440; +update datasetfieldvalue set value='19.216667' where id=24455; +update datasetfieldvalue set value='69.583333' where id=24457; +update datasetfieldvalue set value='19.216667' where id=24458; +update datasetfieldvalue set value='19.216667' where id=24474; +update datasetfieldvalue set value='69.583333' where id=24481; +update datasetfieldvalue set value='19.216667' where id=24523; +update datasetfieldvalue set value='69.583333' where id=24535; +update datasetfieldvalue set value='69.583333' where id=24538; +update datasetfieldvalue set value='19.216667' where id=24539; +update datasetfieldvalue set value='19.216667' where id=24552; +update datasetfieldvalue set value='69.583333' where id=24559; +update datasetfieldvalue set value='69.583333' where id=24572; +update datasetfieldvalue set value='19.216667' where id=24580; +update datasetfieldvalue set value='69.583333' where id=24589; +update datasetfieldvalue set value='19.216667' where id=24592; +update datasetfieldvalue set value='69.583333' where id=24600; +update datasetfieldvalue set value='19.216667' where id=24610; +update datasetfieldvalue set value='69.583333' where id=24646; +update datasetfieldvalue set value='69.583333' where id=24652; +update datasetfieldvalue set value='19.216667' where id=24656; +update datasetfieldvalue set value='19.216667' where id=24657; +update datasetfieldvalue set value='19.216667' where id=24679; +update datasetfieldvalue set value='69.583333' where id=24689; +update datasetfieldvalue set value='69.583333' where 
id=24695; +update datasetfieldvalue set value='19.216667' where id=24704; +update datasetfieldvalue set value='19.216667' where id=24715; +update datasetfieldvalue set value='19.216667' where id=24725; +update datasetfieldvalue set value='69.583333' where id=24742; +update datasetfieldvalue set value='69.583333' where id=24747; +update datasetfieldvalue set value='19.216667' where id=24750; +update datasetfieldvalue set value='69.583333' where id=24768; +update datasetfieldvalue set value='69.583333' where id=24783; +update datasetfieldvalue set value='19.216667' where id=24788; +update datasetfieldvalue set value='19.216667' where id=24794; +update datasetfieldvalue set value='69.583333' where id=24800; +update datasetfieldvalue set value='19.216667' where id=24806; +update datasetfieldvalue set value='69.583333' where id=24828; +update datasetfieldvalue set value='19.216667' where id=24836; +update datasetfieldvalue set value='69.583333' where id=24849; +update datasetfieldvalue set value='19.216667' where id=24854; +update datasetfieldvalue set value='69.583333' where id=24871; +update datasetfieldvalue set value='69.583333' where id=24881; +update datasetfieldvalue set value='19.216667' where id=24884; +update datasetfieldvalue set value='19.216667' where id=24905; +update datasetfieldvalue set value='69.583333' where id=24908; +update datasetfieldvalue set value='19.216667' where id=24918; +update datasetfieldvalue set value='69.583333' where id=24919; +update datasetfieldvalue set value='69.583333' where id=24924; +update datasetfieldvalue set value='19.216667' where id=24927; +update datasetfieldvalue set value='69.583333' where id=24975; +update datasetfieldvalue set value='19.216667' where id=24988; +update datasetfieldvalue set value='19.216667' where id=24993; +update datasetfieldvalue set value='69.583333' where id=24996; +update datasetfieldvalue set value='69.583333' where id=25006; +update datasetfieldvalue set value='19.216667' where id=25019; +update datasetfieldvalue set value='69.583333' where id=25020; +update datasetfieldvalue set value='19.216667' where id=25021; +update datasetfieldvalue set value='69.583333' where id=25049; +update datasetfieldvalue set value='69.583333' where id=25054; +update datasetfieldvalue set value='19.216667' where id=25062; +update datasetfieldvalue set value='19.216667' where id=25071; +update datasetfieldvalue set value='19.216667' where id=25088; +update datasetfieldvalue set value='69.583333' where id=25110; +update datasetfieldvalue set value='19.216667' where id=25112; +update datasetfieldvalue set value='69.583333' where id=25120; +update datasetfieldvalue set value='69.583333' where id=25132; +update datasetfieldvalue set value='69.583333' where id=25133; +update datasetfieldvalue set value='19.216667' where id=25142; +update datasetfieldvalue set value='19.216667' where id=25154; +update datasetfieldvalue set value='69.583333' where id=25190; +update datasetfieldvalue set value='69.583333' where id=25194; +update datasetfieldvalue set value='19.216667' where id=25204; +update datasetfieldvalue set value='19.216667' where id=25211; +update datasetfieldvalue set value='69.583333' where id=25214; +update datasetfieldvalue set value='19.216667' where id=25225; +update datasetfieldvalue set value='69.583333' where id=25227; +update datasetfieldvalue set value='19.216667' where id=25250; +update datasetfieldvalue set value='19.216667' where id=25254; +update datasetfieldvalue set value='69.583333' where id=25269; +update 
datasetfieldvalue set value='19.216667' where id=25287; +update datasetfieldvalue set value='69.583333' where id=25290; +update datasetfieldvalue set value='19.216667' where id=25298; +update datasetfieldvalue set value='19.216667' where id=25300; +update datasetfieldvalue set value='69.583333' where id=25324; +update datasetfieldvalue set value='69.583333' where id=25329; +update datasetfieldvalue set value='19.216667' where id=25354; +update datasetfieldvalue set value='19.216667' where id=25357; +update datasetfieldvalue set value='69.583333' where id=25361; +update datasetfieldvalue set value='69.583333' where id=25376; +update datasetfieldvalue set value='69.583333' where id=25388; +update datasetfieldvalue set value='19.216667' where id=25399; +update datasetfieldvalue set value='69.583333' where id=25404; +update datasetfieldvalue set value='19.216667' where id=25406; +update datasetfieldvalue set value='19.216667' where id=25436; +update datasetfieldvalue set value='69.583333' where id=25438; +update datasetfieldvalue set value='69.583333' where id=25440; +update datasetfieldvalue set value='19.216667' where id=25445; +update datasetfieldvalue set value='69.583333' where id=25466; +update datasetfieldvalue set value='19.216667' where id=25477; +update datasetfieldvalue set value='19.216667' where id=25502; +update datasetfieldvalue set value='69.583333' where id=25503; +update datasetfieldvalue set value='19.216667' where id=25508; +update datasetfieldvalue set value='69.583333' where id=25511; +update datasetfieldvalue set value='69.583333' where id=25521; +update datasetfieldvalue set value='19.216667' where id=25523; +update datasetfieldvalue set value='69.583333' where id=25550; +update datasetfieldvalue set value='19.216667' where id=25566; +update datasetfieldvalue set value='69.583333' where id=25575; +update datasetfieldvalue set value='19.216667' where id=25580; +update datasetfieldvalue set value='19.216667' where id=25591; +update datasetfieldvalue set value='19.216667' where id=25594; +update datasetfieldvalue set value='69.583333' where id=25597; +update datasetfieldvalue set value='69.583333' where id=25617; +update datasetfieldvalue set value='69.583333' where id=25632; +update datasetfieldvalue set value='69.583333' where id=25652; +update datasetfieldvalue set value='19.216667' where id=25656; +update datasetfieldvalue set value='19.216667' where id=25662; +update datasetfieldvalue set value='69.583333' where id=25691; +update datasetfieldvalue set value='19.216667' where id=25693; +update datasetfieldvalue set value='69.583333' where id=25698; +update datasetfieldvalue set value='19.216667' where id=25706; +update datasetfieldvalue set value='19.216667' where id=25717; +update datasetfieldvalue set value='69.583333' where id=25725; +update datasetfieldvalue set value='69.583333' where id=25731; +update datasetfieldvalue set value='19.216667' where id=25755; +update datasetfieldvalue set value='19.216667' where id=25761; +update datasetfieldvalue set value='69.583333' where id=25776; +update datasetfieldvalue set value='69.583333' where id=25784; +update datasetfieldvalue set value='19.216667' where id=25798; +update datasetfieldvalue set value='69.583333' where id=25800; +update datasetfieldvalue set value='19.216667' where id=25811; +update datasetfieldvalue set value='19.216667' where id=25816; +update datasetfieldvalue set value='69.583333' where id=25821; +update datasetfieldvalue set value='69.583333' where id=25864; +update datasetfieldvalue set 
value='19.216667' where id=25881; +update datasetfieldvalue set value='19.216667' where id=25882; +update datasetfieldvalue set value='69.583333' where id=25883; +update datasetfieldvalue set value='19.216667' where id=25890; +update datasetfieldvalue set value='69.583333' where id=25903; +update datasetfieldvalue set value='19.216667' where id=25919; +update datasetfieldvalue set value='69.583333' where id=25921; +update datasetfieldvalue set value='19.216667' where id=25927; +update datasetfieldvalue set value='19.216667' where id=25938; +update datasetfieldvalue set value='69.583333' where id=25963; +update datasetfieldvalue set value='69.583333' where id=25964; +update datasetfieldvalue set value='19.216667' where id=25968; +update datasetfieldvalue set value='69.583333' where id=25984; +update datasetfieldvalue set value='69.583333' where id=25990; +update datasetfieldvalue set value='19.216667' where id=26008; +update datasetfieldvalue set value='19.216667' where id=26016; +update datasetfieldvalue set value='69.583333' where id=26021; +update datasetfieldvalue set value='19.216667' where id=26041; +update datasetfieldvalue set value='69.583333' where id=26044; +update datasetfieldvalue set value='19.216667' where id=26062; +update datasetfieldvalue set value='69.583333' where id=26063; +update datasetfieldvalue set value='19.216667' where id=26066; +update datasetfieldvalue set value='69.583333' where id=26087; +update datasetfieldvalue set value='69.583333' where id=26096; +update datasetfieldvalue set value='69.583333' where id=26124; +update datasetfieldvalue set value='19.216667' where id=26127; +update datasetfieldvalue set value='19.216667' where id=26135; +update datasetfieldvalue set value='69.583333' where id=26148; +update datasetfieldvalue set value='69.583333' where id=26153; +update datasetfieldvalue set value='19.216667' where id=26154; +update datasetfieldvalue set value='19.216667' where id=26158; +update datasetfieldvalue set value='19.216667' where id=26187; +update datasetfieldvalue set value='19.216667' where id=26188; +update datasetfieldvalue set value='69.583333' where id=26202; +update datasetfieldvalue set value='69.583333' where id=26208; +update datasetfieldvalue set value='19.216667' where id=26240; +update datasetfieldvalue set value='69.583333' where id=26249; +update datasetfieldvalue set value='19.216667' where id=26256; +update datasetfieldvalue set value='69.583333' where id=26261; +update datasetfieldvalue set value='19.216667' where id=26272; +update datasetfieldvalue set value='69.583333' where id=26285; +update datasetfieldvalue set value='69.583333' where id=26290; +update datasetfieldvalue set value='19.216667' where id=26301; +update datasetfieldvalue set value='69.583333' where id=26315; +update datasetfieldvalue set value='69.583333' where id=26323; +update datasetfieldvalue set value='19.216667' where id=26333; +update datasetfieldvalue set value='19.216667' where id=26344; +update datasetfieldvalue set value='69.583333' where id=26356; +update datasetfieldvalue set value='19.216667' where id=26363; +update datasetfieldvalue set value='19.216667' where id=26365; +update datasetfieldvalue set value='69.583333' where id=26367; +update datasetfieldvalue set value='19.216667' where id=26401; +update datasetfieldvalue set value='69.583333' where id=26403; +update datasetfieldvalue set value='69.583333' where id=26415; +update datasetfieldvalue set value='19.216667' where id=26428; +update datasetfieldvalue set value='69.583333' where id=26703; 
+update datasetfieldvalue set value='19.216667' where id=26718; +update datasetfieldvalue set value='69.583333' where id=26721; +update datasetfieldvalue set value='19.216667' where id=26733; +update datasetfieldvalue set value='19.216667' where id=26747; +update datasetfieldvalue set value='19.216667' where id=26753; +update datasetfieldvalue set value='69.583333' where id=26769; +update datasetfieldvalue set value='69.583333' where id=26775; +update datasetfieldvalue set value='19.216667' where id=26814; +update datasetfieldvalue set value='19.216667' where id=26822; +update datasetfieldvalue set value='69.583333' where id=26833; +update datasetfieldvalue set value='69.583333' where id=26841; +update datasetfieldvalue set value='19.216667' where id=26852; +update datasetfieldvalue set value='19.216667' where id=26856; +update datasetfieldvalue set value='69.583333' where id=26864; +update datasetfieldvalue set value='69.583333' where id=26880; +update datasetfieldvalue set value='69.583333' where id=26917; +update datasetfieldvalue set value='19.216667' where id=26921; +update datasetfieldvalue set value='19.216667' where id=26927; +update datasetfieldvalue set value='69.583333' where id=26930; +update datasetfieldvalue set value='19.216667' where id=26953; +update datasetfieldvalue set value='69.583333' where id=26957; +update datasetfieldvalue set value='69.583333' where id=26958; +update datasetfieldvalue set value='19.216667' where id=26972; +update datasetfieldvalue set value='19.216667' where id=26979; +update datasetfieldvalue set value='69.583333' where id=26993; +update datasetfieldvalue set value='19.216667' where id=26996; +update datasetfieldvalue set value='69.583333' where id=27010; +update datasetfieldvalue set value='69.583333' where id=27022; +update datasetfieldvalue set value='19.216667' where id=27040; +update datasetfieldvalue set value='19.216667' where id=27047; +update datasetfieldvalue set value='69.583333' where id=27054; +update datasetfieldvalue set value='19.216667' where id=27061; +update datasetfieldvalue set value='69.583333' where id=27067; +update datasetfieldvalue set value='19.216667' where id=27070; +update datasetfieldvalue set value='69.583333' where id=27071; +update datasetfieldvalue set value='19.216667' where id=27102; +update datasetfieldvalue set value='69.583333' where id=27107; +update datasetfieldvalue set value='69.583333' where id=27132; +update datasetfieldvalue set value='19.216667' where id=27135; +update datasetfieldvalue set value='69.583333' where id=27144; +update datasetfieldvalue set value='19.216667' where id=27150; +update datasetfieldvalue set value='69.583333' where id=27179; +update datasetfieldvalue set value='19.216667' where id=27184; +update datasetfieldvalue set value='19.216667' where id=27191; +update datasetfieldvalue set value='69.583333' where id=27199; +update datasetfieldvalue set value='19.216667' where id=27202; +update datasetfieldvalue set value='69.583333' where id=27214; +update datasetfieldvalue set value='19.216667' where id=27240; +update datasetfieldvalue set value='69.583333' where id=27245; +update datasetfieldvalue set value='69.583333' where id=27248; +update datasetfieldvalue set value='19.216667' where id=27264; +update datasetfieldvalue set value='69.583333' where id=27269; +update datasetfieldvalue set value='69.583333' where id=27271; +update datasetfieldvalue set value='19.216667' where id=27287; +update datasetfieldvalue set value='19.216667' where id=27305; +update datasetfieldvalue set 
value='69.583333' where id=27312; +update datasetfieldvalue set value='19.216667' where id=27326; +update datasetfieldvalue set value='19.216667' where id=27327; +update datasetfieldvalue set value='69.583333' where id=27333; +update datasetfieldvalue set value='69.583333' where id=27353; +update datasetfieldvalue set value='19.216667' where id=27360; +update datasetfieldvalue set value='19.216667' where id=27370; +update datasetfieldvalue set value='69.583333' where id=27389; +update datasetfieldvalue set value='69.583333' where id=27396; +update datasetfieldvalue set value='19.216667' where id=27398; +update datasetfieldvalue set value='69.583333' where id=27399; +update datasetfieldvalue set value='19.216667' where id=27405; +update datasetfieldvalue set value='69.583333' where id=27446; +update datasetfieldvalue set value='19.216667' where id=27455; +update datasetfieldvalue set value='19.216667' where id=27464; +update datasetfieldvalue set value='69.583333' where id=27475; +update datasetfieldvalue set value='19.216667' where id=27758; +update datasetfieldvalue set value='69.583333' where id=27765; +update datasetfieldvalue set value='69.583333' where id=27783; +update datasetfieldvalue set value='19.216667' where id=27789; +update datasetfieldvalue set value='19.216667' where id=27882; +update datasetfieldvalue set value='69.583333' where id=27888; +update datasetfieldvalue set value='69.583333' where id=27905; +update datasetfieldvalue set value='19.216667' where id=27915; +update datasetfieldvalue set value='19.216667' where id=27920; +update datasetfieldvalue set value='69.583333' where id=27926; +update datasetfieldvalue set value='19.216667' where id=27938; +update datasetfieldvalue set value='69.583333' where id=27950; +update datasetfieldvalue set value='19.216667' where id=27969; +update datasetfieldvalue set value='69.583333' where id=27971; +update datasetfieldvalue set value='69.583333' where id=27973; +update datasetfieldvalue set value='19.216667' where id=27986; +update datasetfieldvalue set value='19.216667' where id=28005; +update datasetfieldvalue set value='69.583333' where id=28014; +update datasetfieldvalue set value='69.583333' where id=28038; +update datasetfieldvalue set value='19.216667' where id=28039; +update datasetfieldvalue set value='19.216667' where id=28074; +update datasetfieldvalue set value='69.583333' where id=28084; +update datasetfieldvalue set value='19.216667' where id=28086; +update datasetfieldvalue set value='69.583333' where id=28087; +update datasetfieldvalue set value='69.583333' where id=28097; +update datasetfieldvalue set value='19.216667' where id=28116; +update datasetfieldvalue set value='69.583333' where id=28122; +update datasetfieldvalue set value='19.216667' where id=28124; +update datasetfieldvalue set value='69.583333' where id=28140; +update datasetfieldvalue set value='19.216667' where id=28145; +update datasetfieldvalue set value='19.216667' where id=28155; +update datasetfieldvalue set value='69.583333' where id=28161; +update datasetfieldvalue set value='69.583333' where id=28685; +update datasetfieldvalue set value='19.216667' where id=28698; +update datasetfieldvalue set value='69.583333' where id=28708; +update datasetfieldvalue set value='19.216667' where id=28709; +update datasetfieldvalue set value='19.216667' where id=28735; +update datasetfieldvalue set value='69.583333' where id=28737; +update datasetfieldvalue set value='69.583333' where id=28754; +update datasetfieldvalue set value='19.216667' where id=28760; 
+update datasetfieldvalue set value='69.583333' where id=28765; +update datasetfieldvalue set value='19.216667' where id=28778; +update datasetfieldvalue set value='19.216667' where id=28785; +update datasetfieldvalue set value='69.583333' where id=28800; +update datasetfieldvalue set value='69.583333' where id=28808; +update datasetfieldvalue set value='19.216667' where id=28818; +update datasetfieldvalue set value='69.583333' where id=28819; +update datasetfieldvalue set value='19.216667' where id=28842; +update datasetfieldvalue set value='69.583333' where id=28978; +update datasetfieldvalue set value='69.583333' where id=28991; +update datasetfieldvalue set value='19.216667' where id=28994; +update datasetfieldvalue set value='19.216667' where id=28996; +update datasetfieldvalue set value='19.216667' where id=29016; +update datasetfieldvalue set value='19.216667' where id=29029; +update datasetfieldvalue set value='69.583333' where id=29037; +update datasetfieldvalue set value='69.583333' where id=29047; +update datasetfieldvalue set value='19.216667' where id=29057; +update datasetfieldvalue set value='69.583333' where id=29067; +update datasetfieldvalue set value='19.216667' where id=29082; +update datasetfieldvalue set value='69.583333' where id=29094; +update datasetfieldvalue set value='19.216667' where id=29102; +update datasetfieldvalue set value='69.583333' where id=29104; +update datasetfieldvalue set value='19.216667' where id=29115; +update datasetfieldvalue set value='69.583333' where id=29136; +update datasetfieldvalue set value='19.216667' where id=29141; +update datasetfieldvalue set value='69.583333' where id=29146; +update datasetfieldvalue set value='69.583333' where id=29150; +update datasetfieldvalue set value='19.216667' where id=29178; +update datasetfieldvalue set value='69.583333' where id=29187; +update datasetfieldvalue set value='69.583333' where id=29196; +update datasetfieldvalue set value='19.216667' where id=29202; +update datasetfieldvalue set value='19.216667' where id=29210; +update datasetfieldvalue set value='19.216667' where id=29226; +update datasetfieldvalue set value='69.583333' where id=29230; +update datasetfieldvalue set value='19.216667' where id=29249; +update datasetfieldvalue set value='69.583333' where id=29255; +update datasetfieldvalue set value='69.583333' where id=29275; +update datasetfieldvalue set value='19.216667' where id=29283; +update datasetfieldvalue set value='69.583333' where id=29292; +update datasetfieldvalue set value='19.216667' where id=29298; +update datasetfieldvalue set value='19.216667' where id=29306; +update datasetfieldvalue set value='69.583333' where id=29310; +update datasetfieldvalue set value='69.583333' where id=29317; +update datasetfieldvalue set value='19.216667' where id=29346; +update datasetfieldvalue set value='19.216667' where id=29359; +update datasetfieldvalue set value='19.216667' where id=29363; +update datasetfieldvalue set value='69.583333' where id=29384; +update datasetfieldvalue set value='69.583333' where id=29385; +update datasetfieldvalue set value='19.216667' where id=29396; +update datasetfieldvalue set value='69.583333' where id=29409; +update datasetfieldvalue set value='69.583333' where id=29410; +update datasetfieldvalue set value='19.216667' where id=29430; +update datasetfieldvalue set value='69.583333' where id=29437; +update datasetfieldvalue set value='19.216667' where id=29467; +update datasetfieldvalue set value='19.216667' where id=29468; +update datasetfieldvalue set 
value='69.583333' where id=29469; +update datasetfieldvalue set value='19.216667' where id=29478; +update datasetfieldvalue set value='69.583333' where id=29483; +update datasetfieldvalue set value='69.583333' where id=29492; +update datasetfieldvalue set value='19.216667' where id=29510; +update datasetfieldvalue set value='69.583333' where id=29529; +update datasetfieldvalue set value='19.216667' where id=29542; +update datasetfieldvalue set value='69.583333' where id=29543; +update datasetfieldvalue set value='19.216667' where id=29544; +update datasetfieldvalue set value='69.583333' where id=29566; +update datasetfieldvalue set value='19.216667' where id=29570; +update datasetfieldvalue set value='19.216667' where id=29595; +update datasetfieldvalue set value='69.583333' where id=29598; +update datasetfieldvalue set value='19.216667' where id=29602; +update datasetfieldvalue set value='69.583333' where id=29605; +update datasetfieldvalue set value='19.216667' where id=29610; +update datasetfieldvalue set value='69.583333' where id=29620; +update datasetfieldvalue set value='19.216667' where id=29656; +update datasetfieldvalue set value='69.583333' where id=29657; +update datasetfieldvalue set value='19.216667' where id=29676; +update datasetfieldvalue set value='69.583333' where id=29678; +update datasetfieldvalue set value='19.216667' where id=29686; +update datasetfieldvalue set value='69.583333' where id=29694; +update datasetfieldvalue set value='19.216667' where id=29696; +update datasetfieldvalue set value='69.583333' where id=29708; +update datasetfieldvalue set value='19.216667' where id=29727; +update datasetfieldvalue set value='19.216667' where id=29747; +update datasetfieldvalue set value='69.583333' where id=29749; +update datasetfieldvalue set value='69.583333' where id=29756; +update datasetfieldvalue set value='69.583333' where id=29777; +update datasetfieldvalue set value='69.583333' where id=29783; +update datasetfieldvalue set value='19.216667' where id=29797; +update datasetfieldvalue set value='19.216667' where id=29806; +update datasetfieldvalue set value='69.583333' where id=29811; +update datasetfieldvalue set value='19.216667' where id=29826; +update datasetfieldvalue set value='19.216667' where id=29828; +update datasetfieldvalue set value='69.583333' where id=29850; +update datasetfieldvalue set value='69.583333' where id=29862; +update datasetfieldvalue set value='69.583333' where id=29866; +update datasetfieldvalue set value='19.216667' where id=29884; +update datasetfieldvalue set value='19.216667' where id=29892; +update datasetfieldvalue set value='19.216667' where id=29894; +update datasetfieldvalue set value='69.583333' where id=29905; +update datasetfieldvalue set value='19.216667' where id=29912; +update datasetfieldvalue set value='69.583333' where id=29920; +update datasetfieldvalue set value='19.216667' where id=29946; +update datasetfieldvalue set value='19.216667' where id=29958; +update datasetfieldvalue set value='69.583333' where id=29959; +update datasetfieldvalue set value='69.583333' where id=29964; +update datasetfieldvalue set value='19.216667' where id=29997; +update datasetfieldvalue set value='69.583333' where id=30002; +update datasetfieldvalue set value='69.583333' where id=30007; +update datasetfieldvalue set value='19.216667' where id=30011; +update datasetfieldvalue set value='69.583333' where id=30021; +update datasetfieldvalue set value='69.583333' where id=30035; +update datasetfieldvalue set value='19.216667' where id=30058; 
+update datasetfieldvalue set value='19.216667' where id=30061; +update datasetfieldvalue set value='69.583333' where id=30062; +update datasetfieldvalue set value='69.583333' where id=30073; +update datasetfieldvalue set value='19.216667' where id=30084; +update datasetfieldvalue set value='19.216667' where id=30095; +update datasetfieldvalue set value='69.583333' where id=30106; +update datasetfieldvalue set value='19.216667' where id=30118; +update datasetfieldvalue set value='69.583333' where id=30120; +update datasetfieldvalue set value='19.216667' where id=30142; +update datasetfieldvalue set value='69.583333' where id=30151; +update datasetfieldvalue set value='19.216667' where id=30180; +update datasetfieldvalue set value='69.583333' where id=30182; +update datasetfieldvalue set value='19.216667' where id=30187; +update datasetfieldvalue set value='19.216667' where id=30197; +update datasetfieldvalue set value='19.216667' where id=30199; +update datasetfieldvalue set value='69.583333' where id=30203; +update datasetfieldvalue set value='69.583333' where id=30229; +update datasetfieldvalue set value='19.216667' where id=30239; +update datasetfieldvalue set value='69.583333' where id=30244; +update datasetfieldvalue set value='19.216667' where id=30247; +update datasetfieldvalue set value='69.583333' where id=30253; +update datasetfieldvalue set value='69.583333' where id=30279; +update datasetfieldvalue set value='69.583333' where id=30288; +update datasetfieldvalue set value='19.216667' where id=30303; +update datasetfieldvalue set value='19.216667' where id=30309; +update datasetfieldvalue set value='19.216667' where id=30316; +update datasetfieldvalue set value='19.216667' where id=30326; +update datasetfieldvalue set value='69.583333' where id=30347; +update datasetfieldvalue set value='69.583333' where id=30349; +update datasetfieldvalue set value='69.583333' where id=30361; +update datasetfieldvalue set value='19.216667' where id=30370; +update datasetfieldvalue set value='19.216667' where id=30372; +update datasetfieldvalue set value='69.583333' where id=30396; +update datasetfieldvalue set value='69.583333' where id=30400; +update datasetfieldvalue set value='19.216667' where id=30433; +update datasetfieldvalue set value='69.583333' where id=30437; +update datasetfieldvalue set value='19.216667' where id=30438; +update datasetfieldvalue set value='19.216667' where id=30500; +update datasetfieldvalue set value='69.583333' where id=30509; +update datasetfieldvalue set value='19.216667' where id=30511; +update datasetfieldvalue set value='69.583333' where id=30525; +update datasetfieldvalue set value='69.583333' where id=30551; +update datasetfieldvalue set value='19.216667' where id=30555; +update datasetfieldvalue set value='19.216667' where id=30557; +update datasetfieldvalue set value='69.583333' where id=30559; +update datasetfieldvalue set value='69.583333' where id=30577; +update datasetfieldvalue set value='19.216667' where id=30591; +update datasetfieldvalue set value='69.583333' where id=30592; +update datasetfieldvalue set value='19.216667' where id=30607; +update datasetfieldvalue set value='19.216667' where id=30622; +update datasetfieldvalue set value='69.583333' where id=30643; +update datasetfieldvalue set value='69.583333' where id=30647; +update datasetfieldvalue set value='19.216667' where id=30653; +update datasetfieldvalue set value='69.583333' where id=30665; +update datasetfieldvalue set value='19.216667' where id=30670; +update datasetfieldvalue set 
value='19.216667' where id=30675; +update datasetfieldvalue set value='69.583333' where id=30692; +update datasetfieldvalue set value='69.583333' where id=30710; +update datasetfieldvalue set value='69.583333' where id=30719; +update datasetfieldvalue set value='19.216667' where id=30724; +update datasetfieldvalue set value='19.216667' where id=30734; +update datasetfieldvalue set value='19.216667' where id=30749; +update datasetfieldvalue set value='69.583333' where id=30764; +update datasetfieldvalue set value='19.216667' where id=30769; +update datasetfieldvalue set value='69.583333' where id=30777; +update datasetfieldvalue set value='19.216667' where id=30794; +update datasetfieldvalue set value='19.216667' where id=30800; +update datasetfieldvalue set value='69.583333' where id=30807; +update datasetfieldvalue set value='69.583333' where id=30822; +update datasetfieldvalue set value='69.583333' where id=30829; +update datasetfieldvalue set value='19.216667' where id=30830; +update datasetfieldvalue set value='69.583333' where id=30852; +update datasetfieldvalue set value='19.216667' where id=30861; +update datasetfieldvalue set value='19.216667' where id=30889; +update datasetfieldvalue set value='19.216667' where id=30898; +update datasetfieldvalue set value='69.583333' where id=30901; +update datasetfieldvalue set value='69.583333' where id=30909; +update datasetfieldvalue set value='19.216667' where id=30918; +update datasetfieldvalue set value='19.216667' where id=30925; +update datasetfieldvalue set value='69.583333' where id=30936; +update datasetfieldvalue set value='69.583333' where id=30945; +update datasetfieldvalue set value='19.216667' where id=30956; +update datasetfieldvalue set value='19.216667' where id=30961; +update datasetfieldvalue set value='69.583333' where id=30976; +update datasetfieldvalue set value='69.583333' where id=30986; +update datasetfieldvalue set value='19.216667' where id=31004; +update datasetfieldvalue set value='19.216667' where id=31016; +update datasetfieldvalue set value='69.583333' where id=31028; +update datasetfieldvalue set value='69.583333' where id=31029; +update datasetfieldvalue set value='69.583333' where id=31050; +update datasetfieldvalue set value='19.216667' where id=31067; +update datasetfieldvalue set value='19.216667' where id=31074; +update datasetfieldvalue set value='69.583333' where id=31076; +update datasetfieldvalue set value='19.216667' where id=31094; +update datasetfieldvalue set value='69.583333' where id=31098; +update datasetfieldvalue set value='69.583333' where id=31103; +update datasetfieldvalue set value='19.216667' where id=31110; +update datasetfieldvalue set value='19.216667' where id=31133; +update datasetfieldvalue set value='19.216667' where id=31137; +update datasetfieldvalue set value='69.583333' where id=31147; +update datasetfieldvalue set value='69.583333' where id=31156; +update datasetfieldvalue set value='19.216667' where id=31162; +update datasetfieldvalue set value='19.216667' where id=31171; +update datasetfieldvalue set value='69.583333' where id=31186; +update datasetfieldvalue set value='69.583333' where id=31197; +update datasetfieldvalue set value='69.583333' where id=31207; +update datasetfieldvalue set value='69.583333' where id=31216; +update datasetfieldvalue set value='19.216667' where id=31224; +update datasetfieldvalue set value='19.216667' where id=31233; +update datasetfieldvalue set value='19.216667' where id=31252; +update datasetfieldvalue set value='69.583333' where id=31265; 
+update datasetfieldvalue set value='69.583333' where id=31283; +update datasetfieldvalue set value='19.216667' where id=31287; +update datasetfieldvalue set value='69.583333' where id=31303; +update datasetfieldvalue set value='19.216667' where id=31304; +update datasetfieldvalue set value='69.583333' where id=31311; +update datasetfieldvalue set value='19.216667' where id=31328; +update datasetfieldvalue set value='69.583333' where id=31337; +update datasetfieldvalue set value='19.216667' where id=31354; +update datasetfieldvalue set value='19.216667' where id=31355; +update datasetfieldvalue set value='69.583333' where id=31358; +update datasetfieldvalue set value='19.216667' where id=31380; +update datasetfieldvalue set value='69.583333' where id=31383; +update datasetfieldvalue set value='19.216667' where id=31389; +update datasetfieldvalue set value='69.583333' where id=31404; +update datasetfieldvalue set value='69.583333' where id=31433; +update datasetfieldvalue set value='69.583333' where id=31439; +update datasetfieldvalue set value='19.216667' where id=31441; +update datasetfieldvalue set value='19.216667' where id=31454; +update datasetfieldvalue set value='69.583333' where id=31463; +update datasetfieldvalue set value='69.583333' where id=31465; +update datasetfieldvalue set value='19.216667' where id=31474; +update datasetfieldvalue set value='19.216667' where id=31497; +update datasetfieldvalue set value='69.583333' where id=31515; +update datasetfieldvalue set value='69.583333' where id=31524; +update datasetfieldvalue set value='19.216667' where id=31526; +update datasetfieldvalue set value='19.216667' where id=31528; +update datasetfieldvalue set value='19.216667' where id=31544; +update datasetfieldvalue set value='69.583333' where id=31569; +update datasetfieldvalue set value='19.216667' where id=31572; +update datasetfieldvalue set value='69.583333' where id=31581; +update datasetfieldvalue set value='19.216667' where id=31594; +update datasetfieldvalue set value='19.216667' where id=31612; +update datasetfieldvalue set value='69.583333' where id=31618; +update datasetfieldvalue set value='69.583333' where id=31622; +update datasetfieldvalue set value='69.583333' where id=31630; +update datasetfieldvalue set value='69.583333' where id=31653; +update datasetfieldvalue set value='19.216667' where id=31657; +update datasetfieldvalue set value='19.216667' where id=31660; +update datasetfieldvalue set value='19.216667' where id=31668; +update datasetfieldvalue set value='69.583333' where id=31670; +update datasetfieldvalue set value='69.583333' where id=31683; +update datasetfieldvalue set value='19.216667' where id=31691; +update datasetfieldvalue set value='19.216667' where id=31718; +update datasetfieldvalue set value='69.583333' where id=31721; +update datasetfieldvalue set value='19.216667' where id=31722; +update datasetfieldvalue set value='69.583333' where id=31730; +update datasetfieldvalue set value='69.583333' where id=31776; +update datasetfieldvalue set value='19.216667' where id=31783; +update datasetfieldvalue set value='69.583333' where id=31787; +update datasetfieldvalue set value='19.216667' where id=31788; +update datasetfieldvalue set value='69.583333' where id=31792; +update datasetfieldvalue set value='69.583333' where id=31799; +update datasetfieldvalue set value='19.216667' where id=31800; +update datasetfieldvalue set value='19.216667' where id=31814; +update datasetfieldvalue set value='19.216667' where id=31834; +update datasetfieldvalue set 
value='69.583333' where id=31840; +update datasetfieldvalue set value='69.583333' where id=31854; +update datasetfieldvalue set value='19.216667' where id=31864; +update datasetfieldvalue set value='19.216667' where id=31898; +update datasetfieldvalue set value='19.216667' where id=31900; +update datasetfieldvalue set value='69.583333' where id=31901; +update datasetfieldvalue set value='69.583333' where id=31909; +update datasetfieldvalue set value='19.216667' where id=31922; +update datasetfieldvalue set value='19.216667' where id=31924; +update datasetfieldvalue set value='69.583333' where id=31937; +update datasetfieldvalue set value='69.583333' where id=31953; +update datasetfieldvalue set value='19.216667' where id=31973; +update datasetfieldvalue set value='69.583333' where id=31984; +update datasetfieldvalue set value='69.583333' where id=31988; +update datasetfieldvalue set value='19.216667' where id=31998; +update datasetfieldvalue set value='19.216667' where id=32007; +update datasetfieldvalue set value='69.583333' where id=32011; +update datasetfieldvalue set value='69.583333' where id=32016; +update datasetfieldvalue set value='19.216667' where id=32039; +update datasetfieldvalue set value='19.216667' where id=32052; +update datasetfieldvalue set value='19.216667' where id=32071; +update datasetfieldvalue set value='69.583333' where id=32074; +update datasetfieldvalue set value='69.583333' where id=32075; +update datasetfieldvalue set value='69.583333' where id=32087; +update datasetfieldvalue set value='69.583333' where id=32090; +update datasetfieldvalue set value='19.216667' where id=32097; +update datasetfieldvalue set value='19.216667' where id=32102; +update datasetfieldvalue set value='19.216667' where id=32132; +update datasetfieldvalue set value='69.583333' where id=32136; +update datasetfieldvalue set value='69.583333' where id=32148; +update datasetfieldvalue set value='19.216667' where id=32154; +update datasetfieldvalue set value='69.583333' where id=32180; +update datasetfieldvalue set value='19.216667' where id=32190; +update datasetfieldvalue set value='69.583333' where id=32191; +update datasetfieldvalue set value='19.216667' where id=32202; +update datasetfieldvalue set value='19.216667' where id=32215; +update datasetfieldvalue set value='19.216667' where id=32222; +update datasetfieldvalue set value='69.583333' where id=32225; +update datasetfieldvalue set value='69.583333' where id=32242; +update datasetfieldvalue set value='69.583333' where id=32277; +update datasetfieldvalue set value='19.216667' where id=32279; +update datasetfieldvalue set value='69.583333' where id=32289; +update datasetfieldvalue set value='19.216667' where id=32290; +update datasetfieldvalue set value='19.216667' where id=32296; +update datasetfieldvalue set value='69.583333' where id=32304; +update datasetfieldvalue set value='69.583333' where id=32322; +update datasetfieldvalue set value='19.216667' where id=32326; +update datasetfieldvalue set value='69.583333' where id=32350; +update datasetfieldvalue set value='69.583333' where id=32366; +update datasetfieldvalue set value='19.216667' where id=32378; +update datasetfieldvalue set value='19.216667' where id=32379; +update datasetfieldvalue set value='19.216667' where id=32393; +update datasetfieldvalue set value='19.216667' where id=32403; +update datasetfieldvalue set value='69.583333' where id=32406; +update datasetfieldvalue set value='69.583333' where id=32409; +update datasetfieldvalue set value='69.583333' where id=32425; 
+update datasetfieldvalue set value='19.216667' where id=32436; +update datasetfieldvalue set value='19.216667' where id=32437; +update datasetfieldvalue set value='69.583333' where id=32445; +update datasetfieldvalue set value='69.583333' where id=32469; +update datasetfieldvalue set value='19.216667' where id=32487; +update datasetfieldvalue set value='19.216667' where id=32495; +update datasetfieldvalue set value='69.583333' where id=32497; +update datasetfieldvalue set value='69.583333' where id=32515; +update datasetfieldvalue set value='69.583333' where id=32518; +update datasetfieldvalue set value='19.216667' where id=32521; +update datasetfieldvalue set value='19.216667' where id=32537; +update datasetfieldvalue set value='19.216667' where id=32559; +update datasetfieldvalue set value='69.583333' where id=32565; +update datasetfieldvalue set value='69.583333' where id=32570; +update datasetfieldvalue set value='19.216667' where id=32582; +update datasetfieldvalue set value='69.583333' where id=32655; +update datasetfieldvalue set value='19.216667' where id=32660; +update datasetfieldvalue set value='69.583333' where id=32665; +update datasetfieldvalue set value='19.216667' where id=32668; +update datasetfieldvalue set value='69.583333' where id=32685; +update datasetfieldvalue set value='19.216667' where id=32700; +update datasetfieldvalue set value='19.216667' where id=32701; +update datasetfieldvalue set value='69.583333' where id=32710; +update datasetfieldvalue set value='69.583333' where id=32716; +update datasetfieldvalue set value='19.216667' where id=32727; +update datasetfieldvalue set value='19.216667' where id=32738; +update datasetfieldvalue set value='69.583333' where id=32744; +update datasetfieldvalue set value='69.583333' where id=32759; +update datasetfieldvalue set value='69.583333' where id=32768; +update datasetfieldvalue set value='19.216667' where id=32788; +update datasetfieldvalue set value='19.216667' where id=32791; +update datasetfieldvalue set value='19.216667' where id=32814; +update datasetfieldvalue set value='19.216667' where id=32829; +update datasetfieldvalue set value='69.583333' where id=32838; +update datasetfieldvalue set value='69.583333' where id=32839; +update datasetfieldvalue set value='19.216667' where id=32848; +update datasetfieldvalue set value='69.583333' where id=32858; +update datasetfieldvalue set value='69.583333' where id=32859; +update datasetfieldvalue set value='19.216667' where id=32872; +update datasetfieldvalue set value='19.216667' where id=32893; +update datasetfieldvalue set value='19.216667' where id=32901; +update datasetfieldvalue set value='69.583333' where id=32914; +update datasetfieldvalue set value='69.583333' where id=32917; +update datasetfieldvalue set value='19.216667' where id=32928; +update datasetfieldvalue set value='69.583333' where id=32932; +update datasetfieldvalue set value='19.216667' where id=32962; +update datasetfieldvalue set value='69.583333' where id=32965; +update datasetfieldvalue set value='69.583333' where id=32977; +update datasetfieldvalue set value='19.216667' where id=32982; +update datasetfieldvalue set value='69.583333' where id=32999; +update datasetfieldvalue set value='19.216667' where id=33007; +update datasetfieldvalue set value='69.583333' where id=33016; +update datasetfieldvalue set value='19.216667' where id=33019; +update datasetfieldvalue set value='69.583333' where id=33021; +update datasetfieldvalue set value='19.216667' where id=33049; +update datasetfieldvalue set 
value='-82.0' where id=33052; +update datasetfieldvalue set value='-4.0' where id=33053; +update datasetfieldvalue set value='-79.0' where id=33054; +update datasetfieldvalue set value='-2.0' where id=33055; +update datasetfieldvalue set value='69.583333' where id=33063; +update datasetfieldvalue set value='19.216667' where id=33066; +update datasetfieldvalue set value='69.583333' where id=33069; +update datasetfieldvalue set value='19.216667' where id=33096; +update datasetfieldvalue set value='69.583333' where id=33107; +update datasetfieldvalue set value='69.583333' where id=33115; +update datasetfieldvalue set value='19.216667' where id=33126; +update datasetfieldvalue set value='19.216667' where id=33142; +update datasetfieldvalue set value='19.216667' where id=33153; +update datasetfieldvalue set value='69.583333' where id=33160; +update datasetfieldvalue set value='19.216667' where id=33168; +update datasetfieldvalue set value='69.583333' where id=33177; +update datasetfieldvalue set value='69.583333' where id=33196; +update datasetfieldvalue set value='19.216667' where id=33201; +update datasetfieldvalue set value='69.583333' where id=33204; +update datasetfieldvalue set value='19.216667' where id=33209; +update datasetfieldvalue set value='19.216667' where id=33247; +update datasetfieldvalue set value='69.583333' where id=33253; +update datasetfieldvalue set value='69.583333' where id=33256; +update datasetfieldvalue set value='19.216667' where id=33270; +update datasetfieldvalue set value='19.216667' where id=33272; +update datasetfieldvalue set value='69.583333' where id=33283; +update datasetfieldvalue set value='19.216667' where id=33285; +update datasetfieldvalue set value='69.583333' where id=33302; +update datasetfieldvalue set value='69.583333' where id=33328; +update datasetfieldvalue set value='19.216667' where id=33343; +update datasetfieldvalue set value='19.216667' where id=33345; +update datasetfieldvalue set value='69.583333' where id=33353; +update datasetfieldvalue set value='69.583333' where id=33364; +update datasetfieldvalue set value='19.216667' where id=33372; +update datasetfieldvalue set value='19.216667' where id=33390; +update datasetfieldvalue set value='69.583333' where id=33396; +update datasetfieldvalue set value='69.583333' where id=33397; +update datasetfieldvalue set value='19.216667' where id=33398; +update datasetfieldvalue set value='19.216667' where id=33423; +update datasetfieldvalue set value='69.583333' where id=33425; +update datasetfieldvalue set value='19.216667' where id=33439; +update datasetfieldvalue set value='19.216667' where id=33444; +update datasetfieldvalue set value='69.583333' where id=33447; +update datasetfieldvalue set value='69.583333' where id=33477; +update datasetfieldvalue set value='69.583333' where id=33486; +update datasetfieldvalue set value='19.216667' where id=33491; +update datasetfieldvalue set value='19.216667' where id=33501; +update datasetfieldvalue set value='69.583333' where id=33511; +update datasetfieldvalue set value='69.583333' where id=33530; +update datasetfieldvalue set value='19.216667' where id=33532; +update datasetfieldvalue set value='69.583333' where id=33534; +update datasetfieldvalue set value='19.216667' where id=33560; +update datasetfieldvalue set value='69.583333' where id=33579; +update datasetfieldvalue set value='69.583333' where id=33595; +update datasetfieldvalue set value='19.216667' where id=33598; +update datasetfieldvalue set value='19.216667' where id=33601; +update 
datasetfieldvalue set value='69.583333' where id=33612; +update datasetfieldvalue set value='19.216667' where id=33620; +update datasetfieldvalue set value='69.583333' where id=33636; +update datasetfieldvalue set value='19.216667' where id=33643; +update datasetfieldvalue set value='19.216667' where id=33649; +update datasetfieldvalue set value='69.583333' where id=33670; +update datasetfieldvalue set value='19.216667' where id=33680; +update datasetfieldvalue set value='69.583333' where id=33686; +update datasetfieldvalue set value='69.583333' where id=33695; +update datasetfieldvalue set value='19.216667' where id=33696; +update datasetfieldvalue set value='19.216667' where id=33718; +update datasetfieldvalue set value='69.583333' where id=33732; +update datasetfieldvalue set value='69.583333' where id=33735; +update datasetfieldvalue set value='19.216667' where id=33739; +update datasetfieldvalue set value='19.216667' where id=33755; +update datasetfieldvalue set value='69.583333' where id=33774; +update datasetfieldvalue set value='69.583333' where id=33777; +update datasetfieldvalue set value='19.216667' where id=33789; +update datasetfieldvalue set value='19.216667' where id=33806; +update datasetfieldvalue set value='69.583333' where id=33813; +update datasetfieldvalue set value='19.216667' where id=33831; +update datasetfieldvalue set value='69.583333' where id=33839; +update datasetfieldvalue set value='19.216667' where id=33850; +update datasetfieldvalue set value='69.583333' where id=33858; +update datasetfieldvalue set value='69.583333' where id=33872; +update datasetfieldvalue set value='19.216667' where id=33877; +update datasetfieldvalue set value='69.583333' where id=33880; +update datasetfieldvalue set value='19.216667' where id=33891; +update datasetfieldvalue set value='19.216667' where id=33914; +update datasetfieldvalue set value='19.216667' where id=33916; +update datasetfieldvalue set value='69.583333' where id=33922; +update datasetfieldvalue set value='69.583333' where id=33935; +update datasetfieldvalue set value='69.583333' where id=33955; +update datasetfieldvalue set value='69.583333' where id=33956; +update datasetfieldvalue set value='19.216667' where id=33958; +update datasetfieldvalue set value='19.216667' where id=33967; +update datasetfieldvalue set value='69.583333' where id=33989; +update datasetfieldvalue set value='19.216667' where id=33997; +update datasetfieldvalue set value='19.216667' where id=34004; +update datasetfieldvalue set value='69.583333' where id=34024; +update datasetfieldvalue set value='69.583333' where id=34040; +update datasetfieldvalue set value='19.216667' where id=34045; +update datasetfieldvalue set value='69.583333' where id=34059; +update datasetfieldvalue set value='19.216667' where id=34064; +update datasetfieldvalue set value='19.216667' where id=34070; +update datasetfieldvalue set value='19.216667' where id=34080; +update datasetfieldvalue set value='69.583333' where id=34092; +update datasetfieldvalue set value='69.583333' where id=34095; +update datasetfieldvalue set value='19.216667' where id=34129; +update datasetfieldvalue set value='19.216667' where id=34135; +update datasetfieldvalue set value='69.583333' where id=34143; +update datasetfieldvalue set value='69.583333' where id=34149; +update datasetfieldvalue set value='19.216667' where id=34161; +update datasetfieldvalue set value='19.216667' where id=34162; +update datasetfieldvalue set value='69.583333' where id=34172; +update datasetfieldvalue set 
value='69.583333' where id=34190; +update datasetfieldvalue set value='69.583333' where id=34198; +update datasetfieldvalue set value='19.216667' where id=34225; +update datasetfieldvalue set value='69.583333' where id=34226; +update datasetfieldvalue set value='19.216667' where id=34236; +update datasetfieldvalue set value='19.216667' where id=34238; +update datasetfieldvalue set value='19.216667' where id=34244; +update datasetfieldvalue set value='69.583333' where id=34265; +update datasetfieldvalue set value='69.583333' where id=34268; +update datasetfieldvalue set value='69.583333' where id=34296; +update datasetfieldvalue set value='19.216667' where id=34301; +update datasetfieldvalue set value='19.216667' where id=34311; +update datasetfieldvalue set value='69.583333' where id=34312; +update datasetfieldvalue set value='69.583333' where id=34322; +update datasetfieldvalue set value='19.216667' where id=34326; +update datasetfieldvalue set value='69.583333' where id=34348; +update datasetfieldvalue set value='19.216667' where id=34351; +update datasetfieldvalue set value='69.583333' where id=34367; +update datasetfieldvalue set value='19.216667' where id=34372; +update datasetfieldvalue set value='69.583333' where id=34381; +update datasetfieldvalue set value='19.216667' where id=34389; +update datasetfieldvalue set value='19.216667' where id=34408; +update datasetfieldvalue set value='69.583333' where id=34418; +update datasetfieldvalue set value='19.216667' where id=34425; +update datasetfieldvalue set value='69.583333' where id=34429; +update datasetfieldvalue set value='19.216667' where id=34451; +update datasetfieldvalue set value='19.216667' where id=34455; +update datasetfieldvalue set value='69.583333' where id=34467; +update datasetfieldvalue set value='69.583333' where id=34470; +update datasetfieldvalue set value='19.216667' where id=34497; +update datasetfieldvalue set value='69.583333' where id=34507; +update datasetfieldvalue set value='19.216667' where id=34519; +update datasetfieldvalue set value='69.583333' where id=34530; +update datasetfieldvalue set value='69.583333' where id=34541; +update datasetfieldvalue set value='69.583333' where id=34547; +update datasetfieldvalue set value='19.216667' where id=34556; +update datasetfieldvalue set value='19.216667' where id=34562; +update datasetfieldvalue set value='69.583333' where id=34577; +update datasetfieldvalue set value='19.216667' where id=34586; +update datasetfieldvalue set value='69.583333' where id=34601; +update datasetfieldvalue set value='19.216667' where id=34610; +update datasetfieldvalue set value='69.583333' where id=34622; +update datasetfieldvalue set value='19.216667' where id=34634; +update datasetfieldvalue set value='19.216667' where id=34644; +update datasetfieldvalue set value='69.583333' where id=34645; +update datasetfieldvalue set value='69.583333' where id=34660; +update datasetfieldvalue set value='19.216667' where id=34661; +update datasetfieldvalue set value='69.583333' where id=34664; +update datasetfieldvalue set value='19.216667' where id=34677; +update datasetfieldvalue set value='69.583333' where id=34703; +update datasetfieldvalue set value='69.583333' where id=34706; +update datasetfieldvalue set value='19.216667' where id=34730; +update datasetfieldvalue set value='19.216667' where id=34738; +update datasetfieldvalue set value='69.583333' where id=34761; +update datasetfieldvalue set value='69.583333' where id=34765; +update datasetfieldvalue set value='19.216667' where id=34774; 
+update datasetfieldvalue set value='19.216667' where id=34777; +update datasetfieldvalue set value='69.583333' where id=34791; +update datasetfieldvalue set value='19.216667' where id=34792; +update datasetfieldvalue set value='69.583333' where id=34804; +update datasetfieldvalue set value='19.216667' where id=34807; +update datasetfieldvalue set value='19.216667' where id=34836; +update datasetfieldvalue set value='69.583333' where id=34845; +update datasetfieldvalue set value='69.583333' where id=34861; +update datasetfieldvalue set value='19.216667' where id=34864; +update datasetfieldvalue set value='19.216667' where id=34871; +update datasetfieldvalue set value='69.583333' where id=34879; +update datasetfieldvalue set value='19.216667' where id=34891; +update datasetfieldvalue set value='69.583333' where id=34903; +update datasetfieldvalue set value='69.583333' where id=34919; +update datasetfieldvalue set value='19.216667' where id=34934; +update datasetfieldvalue set value='19.216667' where id=34941; +update datasetfieldvalue set value='69.583333' where id=34944; +update datasetfieldvalue set value='69.583333' where id=34959; +update datasetfieldvalue set value='69.583333' where id=34962; +update datasetfieldvalue set value='19.216667' where id=34969; +update datasetfieldvalue set value='19.216667' where id=34992; +update datasetfieldvalue set value='19.216667' where id=34993; +update datasetfieldvalue set value='19.216667' where id=34995; +update datasetfieldvalue set value='69.583333' where id=35002; +update datasetfieldvalue set value='69.583333' where id=35032; +update datasetfieldvalue set value='19.216667' where id=35038; +update datasetfieldvalue set value='69.583333' where id=35055; +update datasetfieldvalue set value='69.583333' where id=35062; +update datasetfieldvalue set value='19.216667' where id=35063; +update datasetfieldvalue set value='69.583333' where id=35078; +update datasetfieldvalue set value='19.216667' where id=35083; +update datasetfieldvalue set value='19.216667' where id=35088; +update datasetfieldvalue set value='69.583333' where id=35096; +update datasetfieldvalue set value='69.583333' where id=35145; +update datasetfieldvalue set value='69.583333' where id=35147; +update datasetfieldvalue set value='19.216667' where id=35149; +update datasetfieldvalue set value='19.216667' where id=35156; +update datasetfieldvalue set value='19.216667' where id=35173; +update datasetfieldvalue set value='69.583333' where id=35193; +update datasetfieldvalue set value='69.583333' where id=35195; +update datasetfieldvalue set value='19.216667' where id=35202; +update datasetfieldvalue set value='69.583333' where id=35207; +update datasetfieldvalue set value='19.216667' where id=35212; +update datasetfieldvalue set value='69.583333' where id=35214; +update datasetfieldvalue set value='19.216667' where id=35235; +update datasetfieldvalue set value='19.216667' where id=35267; +update datasetfieldvalue set value='69.583333' where id=35270; +update datasetfieldvalue set value='19.216667' where id=35278; +update datasetfieldvalue set value='69.583333' where id=35286; +update datasetfieldvalue set value='19.216667' where id=35292; +update datasetfieldvalue set value='69.583333' where id=35301; +update datasetfieldvalue set value='19.216667' where id=35321; +update datasetfieldvalue set value='69.583333' where id=35328; +update datasetfieldvalue set value='69.583333' where id=35340; +update datasetfieldvalue set value='19.216667' where id=35344; +update datasetfieldvalue set 
value='69.583333' where id=35350; +update datasetfieldvalue set value='19.216667' where id=35352; +update datasetfieldvalue set value='19.216667' where id=35380; +update datasetfieldvalue set value='69.583333' where id=35388; +update datasetfieldvalue set value='19.216667' where id=35392; +update datasetfieldvalue set value='69.583333' where id=35404; +update datasetfieldvalue set value='69.583333' where id=35425; +update datasetfieldvalue set value='19.216667' where id=35427; +update datasetfieldvalue set value='69.583333' where id=35431; +update datasetfieldvalue set value='19.216667' where id=35433; +update datasetfieldvalue set value='69.583333' where id=35474; +update datasetfieldvalue set value='19.216667' where id=35476; +update datasetfieldvalue set value='69.583333' where id=35492; +update datasetfieldvalue set value='19.216667' where id=35493; +update datasetfieldvalue set value='69.583333' where id=35499; +update datasetfieldvalue set value='69.583333' where id=35509; +update datasetfieldvalue set value='19.216667' where id=35529; +update datasetfieldvalue set value='19.216667' where id=35533; +update datasetfieldvalue set value='19.216667' where id=35546; +update datasetfieldvalue set value='69.583333' where id=35566; +update datasetfieldvalue set value='19.216667' where id=35570; +update datasetfieldvalue set value='69.583333' where id=35577; +update datasetfieldvalue set value='19.216667' where id=35598; +update datasetfieldvalue set value='69.583333' where id=35601; +update datasetfieldvalue set value='69.583333' where id=35602; +update datasetfieldvalue set value='19.216667' where id=35603; +update datasetfieldvalue set value='19.216667' where id=35627; +update datasetfieldvalue set value='69.583333' where id=35631; +update datasetfieldvalue set value='69.583333' where id=35643; +update datasetfieldvalue set value='19.216667' where id=35650; +update datasetfieldvalue set value='19.216667' where id=35666; +update datasetfieldvalue set value='69.583333' where id=35696; +update datasetfieldvalue set value='19.216667' where id=35700; +update datasetfieldvalue set value='69.583333' where id=35704; +update datasetfieldvalue set value='19.216667' where id=35711; +update datasetfieldvalue set value='19.216667' where id=35732; +update datasetfieldvalue set value='69.583333' where id=35737; +update datasetfieldvalue set value='69.583333' where id=35739; +update datasetfieldvalue set value='19.216667' where id=35757; +update datasetfieldvalue set value='69.583333' where id=35765; +update datasetfieldvalue set value='69.583333' where id=35768; +update datasetfieldvalue set value='19.216667' where id=35771; +update datasetfieldvalue set value='19.216667' where id=35802; +update datasetfieldvalue set value='19.216667' where id=35803; +update datasetfieldvalue set value='69.583333' where id=35805; +update datasetfieldvalue set value='69.583333' where id=35823; +update datasetfieldvalue set value='69.583333' where id=35838; +update datasetfieldvalue set value='19.216667' where id=35860; +update datasetfieldvalue set value='19.216667' where id=35862; +update datasetfieldvalue set value='69.583333' where id=35869; +update datasetfieldvalue set value='19.216667' where id=35876; +update datasetfieldvalue set value='69.583333' where id=35906; +update datasetfieldvalue set value='69.583333' where id=35910; +update datasetfieldvalue set value='19.216667' where id=35913; +update datasetfieldvalue set value='19.216667' where id=35924; +update datasetfieldvalue set value='69.583333' where id=35931; 
+update datasetfieldvalue set value='19.216667' where id=35933; +update datasetfieldvalue set value='69.583333' where id=35947; +update datasetfieldvalue set value='69.583333' where id=35961; +update datasetfieldvalue set value='19.216667' where id=35967; +update datasetfieldvalue set value='19.216667' where id=35987; +update datasetfieldvalue set value='69.583333' where id=35993; +update datasetfieldvalue set value='69.583333' where id=36012; +update datasetfieldvalue set value='19.216667' where id=36030; +update datasetfieldvalue set value='69.583333' where id=36033; +update datasetfieldvalue set value='19.216667' where id=36042; +update datasetfieldvalue set value='19.216667' where id=36048; +update datasetfieldvalue set value='69.583333' where id=36052; +update datasetfieldvalue set value='69.583333' where id=36070; +update datasetfieldvalue set value='19.216667' where id=36073; +update datasetfieldvalue set value='69.583333' where id=36086; +update datasetfieldvalue set value='19.216667' where id=36101; +update datasetfieldvalue set value='69.583333' where id=36105; +update datasetfieldvalue set value='19.216667' where id=36110; +update datasetfieldvalue set value='69.583333' where id=36151; +update datasetfieldvalue set value='19.216667' where id=36152; +update datasetfieldvalue set value='19.216667' where id=36158; +update datasetfieldvalue set value='69.583333' where id=36159; +update datasetfieldvalue set value='19.216667' where id=36170; +update datasetfieldvalue set value='19.216667' where id=36190; +update datasetfieldvalue set value='69.583333' where id=36197; +update datasetfieldvalue set value='69.583333' where id=36203; +update datasetfieldvalue set value='19.216667' where id=36219; +update datasetfieldvalue set value='69.583333' where id=36221; +update datasetfieldvalue set value='69.583333' where id=36234; +update datasetfieldvalue set value='19.216667' where id=36244; +update datasetfieldvalue set value='69.583333' where id=36266; +update datasetfieldvalue set value='19.216667' where id=36271; +update datasetfieldvalue set value='19.216667' where id=36287; +update datasetfieldvalue set value='69.583333' where id=36289; +update datasetfieldvalue set value='69.583333' where id=36318; +update datasetfieldvalue set value='19.216667' where id=36323; +update datasetfieldvalue set value='19.216667' where id=36326; +update datasetfieldvalue set value='69.583333' where id=36333; +update datasetfieldvalue set value='19.216667' where id=36346; +update datasetfieldvalue set value='69.583333' where id=36347; +update datasetfieldvalue set value='69.583333' where id=36352; +update datasetfieldvalue set value='19.216667' where id=36368; +update datasetfieldvalue set value='69.583333' where id=36408; +update datasetfieldvalue set value='19.216667' where id=36426; +update datasetfieldvalue set value='69.583333' where id=36445; +update datasetfieldvalue set value='19.216667' where id=36446; +update datasetfieldvalue set value='69.583333' where id=36455; +update datasetfieldvalue set value='19.216667' where id=36464; +update datasetfieldvalue set value='69.583333' where id=36470; +update datasetfieldvalue set value='19.216667' where id=36472; +update datasetfieldvalue set value='19.216667' where id=36505; +update datasetfieldvalue set value='19.216667' where id=36506; +update datasetfieldvalue set value='69.583333' where id=36522; +update datasetfieldvalue set value='69.583333' where id=36523; +update datasetfieldvalue set value='19.216667' where id=36533; +update datasetfieldvalue set 
value='19.216667' where id=36537; +update datasetfieldvalue set value='69.583333' where id=36543; +update datasetfieldvalue set value='69.583333' where id=36558; +update datasetfieldvalue set value='69.583333' where id=36576; +update datasetfieldvalue set value='19.216667' where id=36580; +update datasetfieldvalue set value='19.216667' where id=36589; +update datasetfieldvalue set value='69.583333' where id=36602; +update datasetfieldvalue set value='69.583333' where id=36631; +update datasetfieldvalue set value='19.216667' where id=36648; +update datasetfieldvalue set value='19.216667' where id=36656; +update datasetfieldvalue set value='69.583333' where id=36658; +update datasetfieldvalue set value='19.216667' where id=36662; +update datasetfieldvalue set value='19.216667' where id=36665; +update datasetfieldvalue set value='69.583333' where id=36672; +update datasetfieldvalue set value='69.583333' where id=36698; +update datasetfieldvalue set value='69.583333' where id=36703; +update datasetfieldvalue set value='19.216667' where id=36708; +update datasetfieldvalue set value='19.216667' where id=36711; +update datasetfieldvalue set value='69.583333' where id=36736; +update datasetfieldvalue set value='69.583333' where id=36744; +update datasetfieldvalue set value='19.216667' where id=36745; +update datasetfieldvalue set value='69.583333' where id=36776; +update datasetfieldvalue set value='19.216667' where id=36779; +update datasetfieldvalue set value='69.583333' where id=36791; +update datasetfieldvalue set value='19.216667' where id=36793; +update datasetfieldvalue set value='69.583333' where id=36808; +update datasetfieldvalue set value='19.216667' where id=36809; +update datasetfieldvalue set value='69.583333' where id=36837; +update datasetfieldvalue set value='19.216667' where id=36841; +update datasetfieldvalue set value='69.583333' where id=36852; +update datasetfieldvalue set value='19.216667' where id=36865; +update datasetfieldvalue set value='19.216667' where id=36872; +update datasetfieldvalue set value='19.216667' where id=36880; +update datasetfieldvalue set value='69.583333' where id=36901; +update datasetfieldvalue set value='69.583333' where id=36905; +update datasetfieldvalue set value='69.583333' where id=36924; +update datasetfieldvalue set value='19.216667' where id=36929; +update datasetfieldvalue set value='69.583333' where id=36934; +update datasetfieldvalue set value='19.216667' where id=36950; +update datasetfieldvalue set value='19.216667' where id=36959; +update datasetfieldvalue set value='69.583333' where id=36973; +update datasetfieldvalue set value='19.216667' where id=36985; +update datasetfieldvalue set value='69.583333' where id=36994; +update datasetfieldvalue set value='19.216667' where id=36999; +update datasetfieldvalue set value='19.216667' where id=37006; +update datasetfieldvalue set value='69.583333' where id=37012; +update datasetfieldvalue set value='69.583333' where id=37018; +update datasetfieldvalue set value='69.583333' where id=37039; +update datasetfieldvalue set value='19.216667' where id=37053; +update datasetfieldvalue set value='19.216667' where id=37059; +update datasetfieldvalue set value='69.583333' where id=37075; +update datasetfieldvalue set value='69.583333' where id=37079; +update datasetfieldvalue set value='19.216667' where id=37084; +update datasetfieldvalue set value='19.216667' where id=37086; +update datasetfieldvalue set value='69.583333' where id=37120; +update datasetfieldvalue set value='-2.0' where id=37232; 
+update datasetfieldvalue set value='-79.0' where id=37234;
+update datasetfieldvalue set value='-82.0' where id=37242;
+update datasetfieldvalue set value='-4.0' where id=37252;
+update datasetfieldvalue set value='69.583333' where id=38269;
+update datasetfieldvalue set value='69.583333' where id=38277;
+update datasetfieldvalue set value='19.216667' where id=38289;
+update datasetfieldvalue set value='19.216667' where id=38291;
+update datasetfieldvalue set value='19.216667' where id=38305;
+update datasetfieldvalue set value='69.583333' where id=38320;
+update datasetfieldvalue set value='19.216667' where id=38335;
+update datasetfieldvalue set value='69.583333' where id=38344;
+update datasetfieldvalue set value='4.9' where id=38654;
+update datasetfieldvalue set value='31.1' where id=38657;
+update datasetfieldvalue set value='58.0' where id=38660;
+update datasetfieldvalue set value='71.1' where id=38663;
+update datasetfieldvalue set value='4.9' where id=38666;
+update datasetfieldvalue set value='71.1' where id=38672;
+update datasetfieldvalue set value='58.0' where id=38691;
+update datasetfieldvalue set value='31.1' where id=38695;
+update datasetfieldvalue set value='73.4' where id=42813;
+update datasetfieldvalue set value='7.05' where id=42815;
+update datasetfieldvalue set value='73.3' where id=42816;
+update datasetfieldvalue set value='8.25' where id=42818;
+update datasetfieldvalue set value='7.05' where id=43296;
+update datasetfieldvalue set value='73.3' where id=43322;
+update datasetfieldvalue set value='8.25' where id=43333;
+update datasetfieldvalue set value='73.4' where id=43339;
+update datasetfieldvalue set value='58.0' where id=45387;
+update datasetfieldvalue set value='18.0' where id=45391;
+update datasetfieldvalue set value='11.0' where id=45394;
+update datasetfieldvalue set value='62.0' where id=45395;
+update datasetfieldvalue set value='18.0' where id=45467;
+update datasetfieldvalue set value='58.0' where id=45470;
+update datasetfieldvalue set value='11.0' where id=45471;
+update datasetfieldvalue set value='62.0' where id=45487;
+update datasetfieldvalue set value='73.4' where id=46004;
+update datasetfieldvalue set value='7.05' where id=46021;
+update datasetfieldvalue set value='73.3' where id=46023;
+update datasetfieldvalue set value='8.25' where id=46039;
+update datasetfieldvalue set value='38.93792' where id=47525;
+update datasetfieldvalue set value='110.13892' where id=47531;
+update datasetfieldvalue set value='82.0' where id=47779;
+update datasetfieldvalue set value='72.0' where id=47809;
+update datasetfieldvalue set value='69.583333' where id=56318;
+update datasetfieldvalue set value='19.216667' where id=56331;
+update datasetfieldvalue set value='69.583333' where id=56339;
+update datasetfieldvalue set value='19.216667' where id=56340;
+update datasetfieldvalue set value='5.5' where id=56853;
+update datasetfieldvalue set value='78.5' where id=56861;
+update datasetfieldvalue set value='78.3' where id=56869;
+update datasetfieldvalue set value='60.630' where id=58455;
+update datasetfieldvalue set value='7.596' where id=58456;
+update datasetfieldvalue set value='7.694' where id=58461;
+update datasetfieldvalue set value='60.545' where id=58462;
+update datasetfieldvalue set value='7.694' where id=60082;
+update datasetfieldvalue set value='60.545' where id=60083;
+update datasetfieldvalue set value='7.596' where id=60097;
+update datasetfieldvalue set value='60.630' where id=60098;
+update datasetfieldvalue set value='158' where id=60706;
+update datasetfieldvalue set value='74' where id=60707;
+update datasetfieldvalue set value='31.2' where id=60779;
+update datasetfieldvalue set value='71.2' where id=60798;
+update datasetfieldvalue set value='4.5' where id=60810;
+update datasetfieldvalue set value='57.9' where id=60820;
+update datasetfieldvalue set value='48.46494' where id=62442;
+update datasetfieldvalue set value='48.02265' where id=62445;
+update datasetfieldvalue set value='66.027306' where id=63491;
+update datasetfieldvalue set value='16.467778' where id=63495;
+update datasetfieldvalue set value='16.45' where id=63714;
+update datasetfieldvalue set value='66.016667' where id=63715;
+update datasetfieldvalue set value='78.656806' where id=64995;
+update datasetfieldvalue set value='16.435583' where id=64997;
+update datasetfieldvalue set value='16.405389' where id=64998;
+update datasetfieldvalue set value='78.659556' where id=64999;
+update datasetfieldvalue set value='14.344' where id=75142;
+update datasetfieldvalue set value='14.866' where id=75152;
+update datasetfieldvalue set value='68.950' where id=75164;
+update datasetfieldvalue set value='69.026' where id=75165;
+update datasetfieldvalue set value='59.663056' where id=75211;
+update datasetfieldvalue set value='10.761667' where id=75227;
+update datasetfieldvalue set value='69.026' where id=75885;
+update datasetfieldvalue set value='14.344' where id=75942;
+update datasetfieldvalue set value='68.950' where id=75953;
+update datasetfieldvalue set value='14.866' where id=75961;
+update datasetfieldvalue set value='78.5' where id=76462;
+update datasetfieldvalue set value='5.5' where id=76463;
+update datasetfieldvalue set value='78.3' where id=76495;
+update datasetfieldvalue set value='76.147778' where id=76803;
+update datasetfieldvalue set value='15.824444' where id=76805;
+update datasetfieldvalue set value='76.084167' where id=76808;
+update datasetfieldvalue set value='16.148889' where id=76811;
+update datasetfieldvalue set value='60.545' where id=79802;
+update datasetfieldvalue set value='7.596' where id=79811;
+update datasetfieldvalue set value='60.630' where id=79820;
+update datasetfieldvalue set value='7.694' where id=79824;
+update datasetfieldvalue set value='76.2' where id=81254;
+update datasetfieldvalue set value='59.663056' where id=82117;
+update datasetfieldvalue set value='39.466667' where id=82119;
+update datasetfieldvalue set value='12.366667' where id=82123;
+update datasetfieldvalue set value='10.761667' where id=82202;
+update datasetfieldvalue set value='59.663056' where id=82645;
+update datasetfieldvalue set value='10.761667' where id=82646;
+update datasetfieldvalue set value='71.38' where id=82923;
+update datasetfieldvalue set value='4.09' where id=82924;
+update datasetfieldvalue set value='31.76' where id=82926;
+update datasetfieldvalue set value='57.76' where id=82927;
+update datasetfieldvalue set value='64' where id=86065;
+update datasetfieldvalue set value='62' where id=86078;
+update datasetfieldvalue set value='9' where id=86083;
+update datasetfieldvalue set value='6' where id=86084;
+update datasetfieldvalue set value='71.38' where id=87035;
+update datasetfieldvalue set value='31.76' where id=87037;
+update datasetfieldvalue set value='4.09' where id=87047;
+update datasetfieldvalue set value='57.76' where id=87064;
+update datasetfieldvalue set value='12.3' where id=87431;
+update datasetfieldvalue set value='57.5' where id=87434;
+update datasetfieldvalue set value='11.5' where id=87435;
+update datasetfieldvalue set value='57.9' where id=87436;
+update datasetfieldvalue set value='82.86' where id=87443;
+update datasetfieldvalue set value='82.92' where id=87444;
+update datasetfieldvalue set value='6.36' where id=87445;
+update datasetfieldvalue set value='6.12' where id=87446;
+update datasetfieldvalue set value='16.435583' where id=88079;
+update datasetfieldvalue set value='78.656806' where id=88091;
+update datasetfieldvalue set value='16.405389' where id=88099;
+update datasetfieldvalue set value='78.659556' where id=88108;
+update datasetfieldvalue set value='31.76' where id=88439;
+update datasetfieldvalue set value='71.38' where id=88454;
+update datasetfieldvalue set value='57.76' where id=88460;
+update datasetfieldvalue set value='4.09' where id=88478;
+update datasetfieldvalue set value='31.76' where id=88516;
+update datasetfieldvalue set value='4.09' where id=88530;
+update datasetfieldvalue set value='57.76' where id=88531;
+update datasetfieldvalue set value='71.38' where id=88533;
+update datasetfieldvalue set value='50.80' where id=89483;
+update datasetfieldvalue set value='-40' where id=89484;
+update datasetfieldvalue set value='69.52' where id=89732;
+update datasetfieldvalue set value='19.10' where id=89733;
+update datasetfieldvalue set value='69.55' where id=89740;
+update datasetfieldvalue set value='19.01' where id=89747;
+update datasetfieldvalue set value='57.76' where id=91572;
+update datasetfieldvalue set value='31.76' where id=91577;
+update datasetfieldvalue set value='4.09' where id=91590;
+update datasetfieldvalue set value='71.38' where id=91606;
+update datasetfieldvalue set value='59.663056' where id=97274;
+update datasetfieldvalue set value='10.761667' where id=97276;
+update datasetfieldvalue set value='68.82' where id=97521;
+update datasetfieldvalue set value='68.82' where id=97522;
+update datasetfieldvalue set value='16.48' where id=97523;
+update datasetfieldvalue set value='16.48' where id=97524;
+update datasetfieldvalue set value='60.20' where id=97539;
+update datasetfieldvalue set value='60.53' where id=97540;
+update datasetfieldvalue set value='5.69' where id=97541;
+update datasetfieldvalue set value='5.17' where id=97542;
+update datasetfieldvalue set value='59.663056' where id=100347;
+update datasetfieldvalue set value='10.761667' where id=100363;
+update datasetfieldvalue set value='59.0636' where id=102440;
+update datasetfieldvalue set value='9.8349' where id=102443;
+update datasetfieldvalue set value='69.583333' where id=103825;
+update datasetfieldvalue set value='19.216667' where id=103830;
+update datasetfieldvalue set value='69.583333' where id=103833;
+update datasetfieldvalue set value='19.216667' where id=103847;
+update datasetfieldvalue set value='19.216667' where id=103867;
+update datasetfieldvalue set value='69.583333' where id=103885;
+update datasetfieldvalue set value='69.583333' where id=103894;
+update datasetfieldvalue set value='19.216667' where id=103896;
+update datasetfieldvalue set value='69.583333' where id=103906;
+update datasetfieldvalue set value='69.583333' where id=103908;
+update datasetfieldvalue set value='19.216667' where id=103921;
+update datasetfieldvalue set value='19.216667' where id=103932;
+update datasetfieldvalue set value='69.583333' where id=103945;
+update datasetfieldvalue set value='19.216667' where id=103947;
+update datasetfieldvalue set value='69.583333' where id=103951;
+update datasetfieldvalue set value='19.216667' where id=103972;
+update datasetfieldvalue set value='19.216667' where id=103994;
+update datasetfieldvalue set value='69.583333' where id=104005;
+update datasetfieldvalue set value='19.216667' where id=104007;
+update datasetfieldvalue set value='69.583333' where id=104024;
+update datasetfieldvalue set value='69.583333' where id=104032;
+update datasetfieldvalue set value='69.583333' where id=104037;
+update datasetfieldvalue set value='19.216667' where id=104048;
+update datasetfieldvalue set value='19.216667' where id=104065;
+update datasetfieldvalue set value='69.583333' where id=104073;
+update datasetfieldvalue set value='19.216667' where id=104088;
+update datasetfieldvalue set value='69.583333' where id=104091;
+update datasetfieldvalue set value='19.216667' where id=104095;
+update datasetfieldvalue set value='19.216667' where id=104114;
+update datasetfieldvalue set value='69.583333' where id=104128;
+update datasetfieldvalue set value='69.583333' where id=104139;
+update datasetfieldvalue set value='19.216667' where id=104147;
+update datasetfieldvalue set value='19.216667' where id=104158;
+update datasetfieldvalue set value='69.583333' where id=104164;
+update datasetfieldvalue set value='69.583333' where id=104167;
+update datasetfieldvalue set value='19.216667' where id=104175;
+update datasetfieldvalue set value='69.583333' where id=104196;
+update datasetfieldvalue set value='19.216667' where id=104201;
+update datasetfieldvalue set value='19.216667' where id=104206;
+update datasetfieldvalue set value='69.583333' where id=104217;
+update datasetfieldvalue set value='69.583333' where id=104238;
+update datasetfieldvalue set value='69.583333' where id=104255;
+update datasetfieldvalue set value='19.216667' where id=104257;
+update datasetfieldvalue set value='19.216667' where id=104262;
+update datasetfieldvalue set value='69.583333' where id=104282;
+update datasetfieldvalue set value='19.216667' where id=104284;
+update datasetfieldvalue set value='69.583333' where id=104291;
+update datasetfieldvalue set value='19.216667' where id=104295;
+update datasetfieldvalue set value='69.583333' where id=104327;
+update datasetfieldvalue set value='19.216667' where id=104328;
+update datasetfieldvalue set value='69.583333' where id=104331;
+update datasetfieldvalue set value='19.216667' where id=104350;
+update datasetfieldvalue set value='69.583333' where id=104376;
+update datasetfieldvalue set value='69.583333' where id=104379;
+update datasetfieldvalue set value='19.216667' where id=104389;
+update datasetfieldvalue set value='19.216667' where id=104401;
+update datasetfieldvalue set value='69.583333' where id=104407;
+update datasetfieldvalue set value='19.216667' where id=104418;
+update datasetfieldvalue set value='69.583333' where id=104434;
+update datasetfieldvalue set value='19.216667' where id=104441;
+update datasetfieldvalue set value='19.216667' where id=104456;
+update datasetfieldvalue set value='19.216667' where id=104462;
+update datasetfieldvalue set value='69.583333' where id=104468;
+update datasetfieldvalue set value='69.583333' where id=104475;
+update datasetfieldvalue set value='19.216667' where id=104503;
+update datasetfieldvalue set value='69.583333' where id=104512;
+update datasetfieldvalue set value='19.216667' where id=104521;
+update datasetfieldvalue set value='69.583333' where id=104522;
+update datasetfieldvalue set value='69.583333' where id=104540;
+update datasetfieldvalue set value='19.216667' where id=104546;
+update datasetfieldvalue set value='69.583333' where id=104552;
+update datasetfieldvalue set value='19.216667' where id=104560; +update datasetfieldvalue set value='19.216667' where id=104588; +update datasetfieldvalue set value='69.583333' where id=104591; +update datasetfieldvalue set value='19.216667' where id=104594; +update datasetfieldvalue set value='69.583333' where id=104609; +update datasetfieldvalue set value='69.583333' where id=104617; +update datasetfieldvalue set value='19.216667' where id=104630; +update datasetfieldvalue set value='69.583333' where id=104632; +update datasetfieldvalue set value='19.216667' where id=104635; +update datasetfieldvalue set value='69.583333' where id=104661; +update datasetfieldvalue set value='19.216667' where id=104683; +update datasetfieldvalue set value='69.583333' where id=104686; +update datasetfieldvalue set value='19.216667' where id=104698; +update datasetfieldvalue set value='19.216667' where id=104705; +update datasetfieldvalue set value='69.583333' where id=104706; +update datasetfieldvalue set value='19.216667' where id=104721; +update datasetfieldvalue set value='69.583333' where id=104735; +update datasetfieldvalue set value='19.216667' where id=104758; +update datasetfieldvalue set value='69.583333' where id=104760; +update datasetfieldvalue set value='69.583333' where id=104765; +update datasetfieldvalue set value='19.216667' where id=104781; +update datasetfieldvalue set value='69.583333' where id=104787; +update datasetfieldvalue set value='19.216667' where id=104788; +update datasetfieldvalue set value='19.216667' where id=104804; +update datasetfieldvalue set value='69.583333' where id=104809; +update datasetfieldvalue set value='19.216667' where id=104833; +update datasetfieldvalue set value='69.583333' where id=104834; +update datasetfieldvalue set value='19.216667' where id=104855; +update datasetfieldvalue set value='69.583333' where id=104867; +update datasetfieldvalue set value='69.583333' where id=104874; +update datasetfieldvalue set value='19.216667' where id=104878; +update datasetfieldvalue set value='19.216667' where id=104895; +update datasetfieldvalue set value='69.583333' where id=104904; +update datasetfieldvalue set value='19.216667' where id=104918; +update datasetfieldvalue set value='19.216667' where id=104920; +update datasetfieldvalue set value='69.583333' where id=104929; +update datasetfieldvalue set value='69.583333' where id=104942; +update datasetfieldvalue set value='69.583333' where id=104964; +update datasetfieldvalue set value='19.216667' where id=104984; +update datasetfieldvalue set value='69.583333' where id=104989; +update datasetfieldvalue set value='19.216667' where id=104992; +update datasetfieldvalue set value='69.583333' where id=105000; +update datasetfieldvalue set value='69.583333' where id=105017; +update datasetfieldvalue set value='19.216667' where id=105021; +update datasetfieldvalue set value='19.216667' where id=105033; +update datasetfieldvalue set value='69.583333' where id=105058; +update datasetfieldvalue set value='69.583333' where id=105068; +update datasetfieldvalue set value='19.216667' where id=105070; +update datasetfieldvalue set value='19.216667' where id=105076; +update datasetfieldvalue set value='69.583333' where id=105087; +update datasetfieldvalue set value='19.216667' where id=105092; +update datasetfieldvalue set value='19.216667' where id=105104; +update datasetfieldvalue set value='69.583333' where id=105116; +update datasetfieldvalue set value='69.583333' where id=105124; +update datasetfieldvalue set 
value='19.216667' where id=105127; +update datasetfieldvalue set value='19.216667' where id=105143; +update datasetfieldvalue set value='69.583333' where id=105153; +update datasetfieldvalue set value='69.583333' where id=105182; +update datasetfieldvalue set value='19.216667' where id=105186; +update datasetfieldvalue set value='19.216667' where id=105187; +update datasetfieldvalue set value='69.583333' where id=105199; +update datasetfieldvalue set value='69.583333' where id=105216; +update datasetfieldvalue set value='19.216667' where id=105222; +update datasetfieldvalue set value='19.216667' where id=105232; +update datasetfieldvalue set value='69.583333' where id=105239; +update datasetfieldvalue set value='19.216667' where id=105262; +update datasetfieldvalue set value='69.583333' where id=105274; +update datasetfieldvalue set value='69.583333' where id=105275; +update datasetfieldvalue set value='19.216667' where id=105278; +update datasetfieldvalue set value='69.583333' where id=105292; +update datasetfieldvalue set value='19.216667' where id=105294; +update datasetfieldvalue set value='69.583333' where id=105299; +update datasetfieldvalue set value='19.216667' where id=105319; +update datasetfieldvalue set value='69.583333' where id=105331; +update datasetfieldvalue set value='19.216667' where id=105336; +update datasetfieldvalue set value='19.216667' where id=105347; +update datasetfieldvalue set value='69.583333' where id=105362; +update datasetfieldvalue set value='69.583333' where id=105373; +update datasetfieldvalue set value='19.216667' where id=105379; +update datasetfieldvalue set value='69.583333' where id=105381; +update datasetfieldvalue set value='19.216667' where id=105390; +update datasetfieldvalue set value='19.216667' where id=105427; +update datasetfieldvalue set value='19.216667' where id=105432; +update datasetfieldvalue set value='69.583333' where id=105434; +update datasetfieldvalue set value='69.583333' where id=105454; +update datasetfieldvalue set value='19.216667' where id=105467; +update datasetfieldvalue set value='69.583333' where id=105468; +update datasetfieldvalue set value='69.583333' where id=105474; +update datasetfieldvalue set value='19.216667' where id=105475; +update datasetfieldvalue set value='69.583333' where id=105501; +update datasetfieldvalue set value='69.583333' where id=105506; +update datasetfieldvalue set value='19.216667' where id=105509; +update datasetfieldvalue set value='19.216667' where id=105520; +update datasetfieldvalue set value='69.583333' where id=105545; +update datasetfieldvalue set value='19.216667' where id=105546; +update datasetfieldvalue set value='19.216667' where id=105568; +update datasetfieldvalue set value='69.583333' where id=105570; +update datasetfieldvalue set value='19.216667' where id=105588; +update datasetfieldvalue set value='69.583333' where id=105606; +update datasetfieldvalue set value='69.583333' where id=105617; +update datasetfieldvalue set value='19.216667' where id=105622; +update datasetfieldvalue set value='19.216667' where id=105629; +update datasetfieldvalue set value='69.583333' where id=105648; +update datasetfieldvalue set value='69.583333' where id=105649; +update datasetfieldvalue set value='19.216667' where id=105653; +update datasetfieldvalue set value='69.583333' where id=105682; +update datasetfieldvalue set value='69.583333' where id=105693; +update datasetfieldvalue set value='19.216667' where id=105694; +update datasetfieldvalue set value='19.216667' where id=105707; +update 
datasetfieldvalue set value='69.583333' where id=105740; +update datasetfieldvalue set value='19.216667' where id=105741; +update datasetfieldvalue set value='69.583333' where id=105743; +update datasetfieldvalue set value='19.216667' where id=105744; +update datasetfieldvalue set value='19.216667' where id=105754; +update datasetfieldvalue set value='69.583333' where id=105761; +update datasetfieldvalue set value='69.583333' where id=105773; +update datasetfieldvalue set value='19.216667' where id=105785; +update datasetfieldvalue set value='69.583333' where id=105794; +update datasetfieldvalue set value='19.216667' where id=105811; +update datasetfieldvalue set value='69.583333' where id=105827; +update datasetfieldvalue set value='19.216667' where id=105831; +update datasetfieldvalue set value='19.216667' where id=105856; +update datasetfieldvalue set value='69.583333' where id=105868; +update datasetfieldvalue set value='69.583333' where id=105870; +update datasetfieldvalue set value='19.216667' where id=105872; +update datasetfieldvalue set value='69.583333' where id=105888; +update datasetfieldvalue set value='19.216667' where id=105899; +update datasetfieldvalue set value='69.583333' where id=105900; +update datasetfieldvalue set value='19.216667' where id=105902; +update datasetfieldvalue set value='69.583333' where id=105932; +update datasetfieldvalue set value='19.216667' where id=105944; +update datasetfieldvalue set value='69.583333' where id=105947; +update datasetfieldvalue set value='19.216667' where id=105959; +update datasetfieldvalue set value='19.216667' where id=105972; +update datasetfieldvalue set value='69.583333' where id=105978; +update datasetfieldvalue set value='69.583333' where id=105986; +update datasetfieldvalue set value='19.216667' where id=106001; +update datasetfieldvalue set value='19.216667' where id=106009; +update datasetfieldvalue set value='69.583333' where id=106013; +update datasetfieldvalue set value='19.216667' where id=106024; +update datasetfieldvalue set value='69.583333' where id=106040; +update datasetfieldvalue set value='69.583333' where id=106048; +update datasetfieldvalue set value='19.216667' where id=106052; +update datasetfieldvalue set value='69.583333' where id=106054; +update datasetfieldvalue set value='19.216667' where id=106080; +update datasetfieldvalue set value='19.216667' where id=106100; +update datasetfieldvalue set value='69.583333' where id=106105; +update datasetfieldvalue set value='69.583333' where id=106108; +update datasetfieldvalue set value='19.216667' where id=106110; +update datasetfieldvalue set value='69.583333' where id=106150; +update datasetfieldvalue set value='19.216667' where id=106152; +update datasetfieldvalue set value='69.583333' where id=106153; +update datasetfieldvalue set value='19.216667' where id=106157; +update datasetfieldvalue set value='19.216667' where id=106171; +update datasetfieldvalue set value='69.583333' where id=106182; +update datasetfieldvalue set value='19.216667' where id=106185; +update datasetfieldvalue set value='69.583333' where id=106191; +update datasetfieldvalue set value='19.216667' where id=106230; +update datasetfieldvalue set value='69.583333' where id=106235; +update datasetfieldvalue set value='69.583333' where id=106246; +update datasetfieldvalue set value='19.216667' where id=106250; +update datasetfieldvalue set value='69.583333' where id=106259; +update datasetfieldvalue set value='19.216667' where id=106262; +update datasetfieldvalue set value='19.216667' 
where id=106280; +update datasetfieldvalue set value='69.583333' where id=106285; +update datasetfieldvalue set value='19.216667' where id=106298; +update datasetfieldvalue set value='69.583333' where id=106306; +update datasetfieldvalue set value='19.216667' where id=106309; +update datasetfieldvalue set value='69.583333' where id=106335; +update datasetfieldvalue set value='69.583333' where id=106349; +update datasetfieldvalue set value='69.583333' where id=106353; +update datasetfieldvalue set value='19.216667' where id=106366; +update datasetfieldvalue set value='19.216667' where id=106368; +update datasetfieldvalue set value='69.583333' where id=106383; +update datasetfieldvalue set value='19.216667' where id=106384; +update datasetfieldvalue set value='69.583333' where id=106396; +update datasetfieldvalue set value='19.216667' where id=106409; +update datasetfieldvalue set value='19.216667' where id=106426; +update datasetfieldvalue set value='19.216667' where id=106443; +update datasetfieldvalue set value='69.583333' where id=106447; +update datasetfieldvalue set value='69.583333' where id=106461; +update datasetfieldvalue set value='69.583333' where id=106466; +update datasetfieldvalue set value='19.216667' where id=106474; +update datasetfieldvalue set value='19.216667' where id=106482; +update datasetfieldvalue set value='69.583333' where id=106497; +update datasetfieldvalue set value='69.583333' where id=106514; +update datasetfieldvalue set value='19.216667' where id=106521; +update datasetfieldvalue set value='69.583333' where id=106529; +update datasetfieldvalue set value='19.216667' where id=106533; +update datasetfieldvalue set value='19.216667' where id=106550; +update datasetfieldvalue set value='69.583333' where id=106559; +update datasetfieldvalue set value='69.583333' where id=106576; +update datasetfieldvalue set value='19.216667' where id=106579; +update datasetfieldvalue set value='69.583333' where id=106601; +update datasetfieldvalue set value='19.216667' where id=106605; +update datasetfieldvalue set value='69.583333' where id=106614; +update datasetfieldvalue set value='19.216667' where id=106631; +update datasetfieldvalue set value='69.583333' where id=106636; +update datasetfieldvalue set value='19.216667' where id=106644; +update datasetfieldvalue set value='19.216667' where id=106652; +update datasetfieldvalue set value='69.583333' where id=106655; +update datasetfieldvalue set value='19.216667' where id=106675; +update datasetfieldvalue set value='69.583333' where id=106684; +update datasetfieldvalue set value='19.216667' where id=106692; +update datasetfieldvalue set value='69.583333' where id=106709; +update datasetfieldvalue set value='19.216667' where id=106724; +update datasetfieldvalue set value='19.216667' where id=106726; +update datasetfieldvalue set value='69.583333' where id=106729; +update datasetfieldvalue set value='69.583333' where id=106745; +update datasetfieldvalue set value='69.583333' where id=106773; +update datasetfieldvalue set value='69.583333' where id=106776; +update datasetfieldvalue set value='19.216667' where id=106781; +update datasetfieldvalue set value='19.216667' where id=106799; +update datasetfieldvalue set value='69.583333' where id=106812; +update datasetfieldvalue set value='69.583333' where id=106813; +update datasetfieldvalue set value='19.216667' where id=106814; +update datasetfieldvalue set value='19.216667' where id=106831; +update datasetfieldvalue set value='19.216667' where id=106848; +update datasetfieldvalue 
set value='69.583333' where id=106852; +update datasetfieldvalue set value='69.583333' where id=106869; +update datasetfieldvalue set value='19.216667' where id=106882; +update datasetfieldvalue set value='69.583333' where id=106886; +update datasetfieldvalue set value='19.216667' where id=106893; +update datasetfieldvalue set value='69.583333' where id=106901; +update datasetfieldvalue set value='19.216667' where id=106906; +update datasetfieldvalue set value='19.216667' where id=106927; +update datasetfieldvalue set value='69.583333' where id=106935; +update datasetfieldvalue set value='69.583333' where id=106939; +update datasetfieldvalue set value='19.216667' where id=106965; +update datasetfieldvalue set value='19.216667' where id=106972; +update datasetfieldvalue set value='69.583333' where id=106988; +update datasetfieldvalue set value='69.583333' where id=106997; +update datasetfieldvalue set value='19.216667' where id=107001; +update datasetfieldvalue set value='19.216667' where id=107019; +update datasetfieldvalue set value='19.216667' where id=107025; +update datasetfieldvalue set value='69.583333' where id=107030; +update datasetfieldvalue set value='69.583333' where id=107047; +update datasetfieldvalue set value='69.583333' where id=107054; +update datasetfieldvalue set value='19.216667' where id=107076; +update datasetfieldvalue set value='19.216667' where id=107077; +update datasetfieldvalue set value='69.583333' where id=107081; +update datasetfieldvalue set value='69.583333' where id=107108; +update datasetfieldvalue set value='19.216667' where id=107111; +update datasetfieldvalue set value='19.216667' where id=107112; +update datasetfieldvalue set value='69.583333' where id=107115; +update datasetfieldvalue set value='69.583333' where id=107138; +update datasetfieldvalue set value='19.216667' where id=107142; +update datasetfieldvalue set value='69.583333' where id=107145; +update datasetfieldvalue set value='19.216667' where id=107167; +update datasetfieldvalue set value='69.583333' where id=107198; +update datasetfieldvalue set value='19.216667' where id=107200; +update datasetfieldvalue set value='19.216667' where id=107210; +update datasetfieldvalue set value='69.583333' where id=107214; +update datasetfieldvalue set value='19.216667' where id=107226; +update datasetfieldvalue set value='19.216667' where id=107233; +update datasetfieldvalue set value='69.583333' where id=107240; +update datasetfieldvalue set value='69.583333' where id=107253; +update datasetfieldvalue set value='19.216667' where id=107267; +update datasetfieldvalue set value='69.583333' where id=107290; +update datasetfieldvalue set value='19.216667' where id=107293; +update datasetfieldvalue set value='69.583333' where id=107296; +update datasetfieldvalue set value='19.216667' where id=107312; +update datasetfieldvalue set value='69.583333' where id=107313; +update datasetfieldvalue set value='19.216667' where id=107327; +update datasetfieldvalue set value='69.583333' where id=107338; +update datasetfieldvalue set value='69.583333' where id=107349; +update datasetfieldvalue set value='19.216667' where id=107350; +update datasetfieldvalue set value='19.216667' where id=107372; +update datasetfieldvalue set value='69.583333' where id=107377; +update datasetfieldvalue set value='69.583333' where id=107403; +update datasetfieldvalue set value='19.216667' where id=107410; +update datasetfieldvalue set value='19.216667' where id=107413; +update datasetfieldvalue set value='69.583333' where id=107419; 
+update datasetfieldvalue set value='69.583333' where id=107439; +update datasetfieldvalue set value='69.583333' where id=107445; +update datasetfieldvalue set value='19.216667' where id=107450; +update datasetfieldvalue set value='19.216667' where id=107465; +update datasetfieldvalue set value='19.216667' where id=107481; +update datasetfieldvalue set value='19.216667' where id=107483; +update datasetfieldvalue set value='69.583333' where id=107496; +update datasetfieldvalue set value='69.583333' where id=107505; +update datasetfieldvalue set value='69.583333' where id=107518; +update datasetfieldvalue set value='19.216667' where id=107523; +update datasetfieldvalue set value='69.583333' where id=107543; +update datasetfieldvalue set value='19.216667' where id=107548; +update datasetfieldvalue set value='19.216667' where id=107560; +update datasetfieldvalue set value='19.216667' where id=107568; +update datasetfieldvalue set value='69.583333' where id=107571; +update datasetfieldvalue set value='69.583333' where id=107572; +update datasetfieldvalue set value='19.216667' where id=107601; +update datasetfieldvalue set value='69.583333' where id=107618; +update datasetfieldvalue set value='69.583333' where id=107629; +update datasetfieldvalue set value='19.216667' where id=107637; +update datasetfieldvalue set value='69.583333' where id=107644; +update datasetfieldvalue set value='19.216667' where id=107653; +update datasetfieldvalue set value='69.583333' where id=107654; +update datasetfieldvalue set value='19.216667' where id=107679; +update datasetfieldvalue set value='69.583333' where id=107684; +update datasetfieldvalue set value='19.216667' where id=107692; +update datasetfieldvalue set value='19.216667' where id=107695; +update datasetfieldvalue set value='69.583333' where id=107706; +update datasetfieldvalue set value='19.216667' where id=107725; +update datasetfieldvalue set value='69.583333' where id=107727; +update datasetfieldvalue set value='19.216667' where id=107750; +update datasetfieldvalue set value='69.583333' where id=107759; +update datasetfieldvalue set value='69.583333' where id=107768; +update datasetfieldvalue set value='19.216667' where id=107771; +update datasetfieldvalue set value='69.583333' where id=107803; +update datasetfieldvalue set value='19.216667' where id=107804; +update datasetfieldvalue set value='69.583333' where id=107813; +update datasetfieldvalue set value='69.583333' where id=107816; +update datasetfieldvalue set value='19.216667' where id=107818; +update datasetfieldvalue set value='19.216667' where id=107824; +update datasetfieldvalue set value='69.583333' where id=107860; +update datasetfieldvalue set value='19.216667' where id=107864; +update datasetfieldvalue set value='19.216667' where id=107875; +update datasetfieldvalue set value='69.583333' where id=107890; +update datasetfieldvalue set value='69.583333' where id=107908; +update datasetfieldvalue set value='19.216667' where id=107920; +update datasetfieldvalue set value='19.216667' where id=107923; +update datasetfieldvalue set value='69.583333' where id=107933; +update datasetfieldvalue set value='69.583333' where id=107936; +update datasetfieldvalue set value='19.216667' where id=107939; +update datasetfieldvalue set value='19.216667' where id=107941; +update datasetfieldvalue set value='69.583333' where id=107968; +update datasetfieldvalue set value='19.216667' where id=107983; +update datasetfieldvalue set value='69.583333' where id=107990; +update datasetfieldvalue set 
value='69.583333' where id=108000; +update datasetfieldvalue set value='19.216667' where id=108017; +update datasetfieldvalue set value='19.216667' where id=108037; +update datasetfieldvalue set value='19.216667' where id=108039; +update datasetfieldvalue set value='69.583333' where id=108045; +update datasetfieldvalue set value='69.583333' where id=108051; +update datasetfieldvalue set value='19.216667' where id=108062; +update datasetfieldvalue set value='19.216667' where id=108063; +update datasetfieldvalue set value='69.583333' where id=108086; +update datasetfieldvalue set value='69.583333' where id=108100; +update datasetfieldvalue set value='69.583333' where id=108124; +update datasetfieldvalue set value='19.216667' where id=108129; +update datasetfieldvalue set value='19.216667' where id=108131; +update datasetfieldvalue set value='69.583333' where id=108140; +update datasetfieldvalue set value='19.216667' where id=108150; +update datasetfieldvalue set value='69.583333' where id=108152; +update datasetfieldvalue set value='19.216667' where id=108175; +update datasetfieldvalue set value='69.583333' where id=108185; +update datasetfieldvalue set value='69.583333' where id=108186; +update datasetfieldvalue set value='19.216667' where id=108190; +update datasetfieldvalue set value='69.583333' where id=108196; +update datasetfieldvalue set value='19.216667' where id=108223; +update datasetfieldvalue set value='19.216667' where id=108238; +update datasetfieldvalue set value='69.583333' where id=108242; +update datasetfieldvalue set value='69.583333' where id=108258; +update datasetfieldvalue set value='19.216667' where id=108267; +update datasetfieldvalue set value='69.583333' where id=108274; +update datasetfieldvalue set value='19.216667' where id=108285; +update datasetfieldvalue set value='19.216667' where id=108303; +update datasetfieldvalue set value='69.583333' where id=108304; +update datasetfieldvalue set value='19.216667' where id=108326; +update datasetfieldvalue set value='69.583333' where id=108329; +update datasetfieldvalue set value='69.583333' where id=108330; +update datasetfieldvalue set value='19.216667' where id=108336; +update datasetfieldvalue set value='19.216667' where id=108367; +update datasetfieldvalue set value='69.583333' where id=108378; +update datasetfieldvalue set value='69.583333' where id=108383; +update datasetfieldvalue set value='19.216667' where id=108395; +update datasetfieldvalue set value='69.583333' where id=108405; +update datasetfieldvalue set value='69.583333' where id=108410; +update datasetfieldvalue set value='19.216667' where id=108425; +update datasetfieldvalue set value='19.216667' where id=108429; +update datasetfieldvalue set value='19.216667' where id=108440; +update datasetfieldvalue set value='69.583333' where id=108443; +update datasetfieldvalue set value='19.216667' where id=108452; +update datasetfieldvalue set value='69.583333' where id=108468; +update datasetfieldvalue set value='19.216667' where id=108482; +update datasetfieldvalue set value='19.216667' where id=108487; +update datasetfieldvalue set value='69.583333' where id=108495; +update datasetfieldvalue set value='69.583333' where id=108520; +update datasetfieldvalue set value='19.216667' where id=108523; +update datasetfieldvalue set value='69.583333' where id=108536; +update datasetfieldvalue set value='19.216667' where id=108549; +update datasetfieldvalue set value='69.583333' where id=108558; +update datasetfieldvalue set value='69.583333' where id=108572; +update 
datasetfieldvalue set value='19.216667' where id=108587; +update datasetfieldvalue set value='19.216667' where id=108594; +update datasetfieldvalue set value='69.583333' where id=108602; +update datasetfieldvalue set value='19.216667' where id=108614; +update datasetfieldvalue set value='69.583333' where id=108636; +update datasetfieldvalue set value='19.216667' where id=108639; +update datasetfieldvalue set value='69.583333' where id=108645; +update datasetfieldvalue set value='19.216667' where id=108653; +update datasetfieldvalue set value='69.583333' where id=108654; +update datasetfieldvalue set value='69.583333' where id=108672; +update datasetfieldvalue set value='19.216667' where id=108674; +update datasetfieldvalue set value='69.583333' where id=108691; +update datasetfieldvalue set value='19.216667' where id=108712; +update datasetfieldvalue set value='69.583333' where id=108717; +update datasetfieldvalue set value='19.216667' where id=108723; +update datasetfieldvalue set value='69.583333' where id=108736; +update datasetfieldvalue set value='19.216667' where id=108743; +update datasetfieldvalue set value='69.583333' where id=108752; +update datasetfieldvalue set value='19.216667' where id=108769; +update datasetfieldvalue set value='19.216667' where id=108805; +update datasetfieldvalue set value='19.216667' where id=108811; +update datasetfieldvalue set value='69.583333' where id=108812; +update datasetfieldvalue set value='69.583333' where id=108815; +update datasetfieldvalue set value='69.583333' where id=108826; +update datasetfieldvalue set value='19.216667' where id=108829; +update datasetfieldvalue set value='69.583333' where id=108841; +update datasetfieldvalue set value='19.216667' where id=108842; +update datasetfieldvalue set value='19.216667' where id=108875; +update datasetfieldvalue set value='69.583333' where id=108877; +update datasetfieldvalue set value='19.216667' where id=108878; +update datasetfieldvalue set value='69.583333' where id=108883; +update datasetfieldvalue set value='69.583333' where id=108902; +update datasetfieldvalue set value='19.216667' where id=108903; +update datasetfieldvalue set value='69.583333' where id=108908; +update datasetfieldvalue set value='19.216667' where id=108913; +update datasetfieldvalue set value='19.216667' where id=108942; +update datasetfieldvalue set value='69.583333' where id=108947; +update datasetfieldvalue set value='69.583333' where id=108950; +update datasetfieldvalue set value='19.216667' where id=108982; +update datasetfieldvalue set value='69.583333' where id=109005; +update datasetfieldvalue set value='19.216667' where id=109011; +update datasetfieldvalue set value='19.216667' where id=109012; +update datasetfieldvalue set value='69.583333' where id=109014; +update datasetfieldvalue set value='69.583333' where id=109032; +update datasetfieldvalue set value='69.583333' where id=109053; +update datasetfieldvalue set value='19.216667' where id=109055; +update datasetfieldvalue set value='19.216667' where id=109056; +update datasetfieldvalue set value='69.583333' where id=109079; +update datasetfieldvalue set value='69.583333' where id=109089; +update datasetfieldvalue set value='19.216667' where id=109098; +update datasetfieldvalue set value='19.216667' where id=109102; +update datasetfieldvalue set value='19.216667' where id=109112; +update datasetfieldvalue set value='69.583333' where id=109125; +update datasetfieldvalue set value='69.583333' where id=109128; +update datasetfieldvalue set value='19.216667' 
where id=109129; +update datasetfieldvalue set value='69.583333' where id=109166; +update datasetfieldvalue set value='19.216667' where id=109169; +update datasetfieldvalue set value='19.216667' where id=109174; +update datasetfieldvalue set value='69.583333' where id=109188; +update datasetfieldvalue set value='69.583333' where id=109202; +update datasetfieldvalue set value='19.216667' where id=109217; +update datasetfieldvalue set value='19.216667' where id=109220; +update datasetfieldvalue set value='69.583333' where id=109224; +update datasetfieldvalue set value='69.583333' where id=109248; +update datasetfieldvalue set value='19.216667' where id=109257; +update datasetfieldvalue set value='69.583333' where id=109258; +update datasetfieldvalue set value='19.216667' where id=109271; +update datasetfieldvalue set value='19.216667' where id=109294; +update datasetfieldvalue set value='69.583333' where id=109301; +update datasetfieldvalue set value='19.216667' where id=109315; +update datasetfieldvalue set value='69.583333' where id=109318; +update datasetfieldvalue set value='69.583333' where id=109325; +update datasetfieldvalue set value='19.216667' where id=109338; +update datasetfieldvalue set value='69.583333' where id=109354; +update datasetfieldvalue set value='19.216667' where id=109357; +update datasetfieldvalue set value='19.216667' where id=109371; +update datasetfieldvalue set value='69.583333' where id=109375; +update datasetfieldvalue set value='69.583333' where id=109380; +update datasetfieldvalue set value='19.216667' where id=109385; +update datasetfieldvalue set value='19.216667' where id=109407; +update datasetfieldvalue set value='69.583333' where id=109408; +update datasetfieldvalue set value='19.216667' where id=109424; +update datasetfieldvalue set value='69.583333' where id=109444; +update datasetfieldvalue set value='19.216667' where id=109459; +update datasetfieldvalue set value='69.583333' where id=109475; +update datasetfieldvalue set value='19.216667' where id=109482; +update datasetfieldvalue set value='69.583333' where id=109486; +update datasetfieldvalue set value='69.583333' where id=109488; +update datasetfieldvalue set value='19.216667' where id=109493; +update datasetfieldvalue set value='19.216667' where id=109512; +update datasetfieldvalue set value='69.583333' where id=109515; +update datasetfieldvalue set value='19.216667' where id=109534; +update datasetfieldvalue set value='19.216667' where id=109544; +update datasetfieldvalue set value='69.583333' where id=109555; +update datasetfieldvalue set value='69.583333' where id=109558; +update datasetfieldvalue set value='69.583333' where id=109579; +update datasetfieldvalue set value='69.583333' where id=109588; +update datasetfieldvalue set value='19.216667' where id=109600; +update datasetfieldvalue set value='19.216667' where id=109613; +update datasetfieldvalue set value='19.216667' where id=109625; +update datasetfieldvalue set value='19.216667' where id=109634; +update datasetfieldvalue set value='69.583333' where id=109638; +update datasetfieldvalue set value='69.583333' where id=109654; +update datasetfieldvalue set value='69.583333' where id=109669; +update datasetfieldvalue set value='19.216667' where id=109675; +update datasetfieldvalue set value='19.216667' where id=109682; +update datasetfieldvalue set value='69.583333' where id=109697; +update datasetfieldvalue set value='19.216667' where id=109709; +update datasetfieldvalue set value='69.583333' where id=109728; +update datasetfieldvalue 
set value='19.216667' where id=109729; +update datasetfieldvalue set value='69.583333' where id=109734; +update datasetfieldvalue set value='69.583333' where id=109741; +update datasetfieldvalue set value='19.216667' where id=109750; +update datasetfieldvalue set value='69.583333' where id=109763; +update datasetfieldvalue set value='19.216667' where id=109779; +update datasetfieldvalue set value='19.216667' where id=109789; +update datasetfieldvalue set value='69.583333' where id=109795; +update datasetfieldvalue set value='69.583333' where id=109807; +update datasetfieldvalue set value='19.216667' where id=109818; +update datasetfieldvalue set value='69.583333' where id=109831; +update datasetfieldvalue set value='69.583333' where id=109845; +update datasetfieldvalue set value='19.216667' where id=109852; +update datasetfieldvalue set value='19.216667' where id=109861; +update datasetfieldvalue set value='69.583333' where id=109867; +update datasetfieldvalue set value='19.216667' where id=109871; +update datasetfieldvalue set value='69.583333' where id=109873; +update datasetfieldvalue set value='19.216667' where id=109883; +update datasetfieldvalue set value='19.216667' where id=109923; +update datasetfieldvalue set value='69.583333' where id=109932; +update datasetfieldvalue set value='69.583333' where id=109933; +update datasetfieldvalue set value='19.216667' where id=109937; +update datasetfieldvalue set value='69.583333' where id=109953; +update datasetfieldvalue set value='19.216667' where id=109955; +update datasetfieldvalue set value='69.583333' where id=109957; +update datasetfieldvalue set value='19.216667' where id=109988; +update datasetfieldvalue set value='19.216667' where id=109994; +update datasetfieldvalue set value='69.583333' where id=109995; +update datasetfieldvalue set value='69.583333' where id=110020; +update datasetfieldvalue set value='19.216667' where id=110031; +update datasetfieldvalue set value='19.216667' where id=110034; +update datasetfieldvalue set value='19.216667' where id=110044; +update datasetfieldvalue set value='69.583333' where id=110053; +update datasetfieldvalue set value='69.583333' where id=110074; +update datasetfieldvalue set value='19.216667' where id=110079; +update datasetfieldvalue set value='19.216667' where id=110092; +update datasetfieldvalue set value='69.583333' where id=110105; +update datasetfieldvalue set value='69.583333' where id=110115; +update datasetfieldvalue set value='69.583333' where id=110129; +update datasetfieldvalue set value='69.583333' where id=110137; +update datasetfieldvalue set value='19.216667' where id=110152; +update datasetfieldvalue set value='19.216667' where id=110157; +update datasetfieldvalue set value='19.216667' where id=110170; +update datasetfieldvalue set value='69.583333' where id=110181; +update datasetfieldvalue set value='69.583333' where id=110182; +update datasetfieldvalue set value='19.216667' where id=110201; +update datasetfieldvalue set value='69.583333' where id=110209; +update datasetfieldvalue set value='19.216667' where id=110213; +update datasetfieldvalue set value='69.583333' where id=110219; +update datasetfieldvalue set value='19.216667' where id=110226; +update datasetfieldvalue set value='69.583333' where id=110270; +update datasetfieldvalue set value='19.216667' where id=110271; +update datasetfieldvalue set value='19.216667' where id=110275; +update datasetfieldvalue set value='69.583333' where id=110281; +update datasetfieldvalue set value='69.583333' where id=110304; 
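+-- Sketch (generic psql practice, an assumption rather than part of this fix): running
+-- the whole batch inside one transaction keeps it atomic, so a failed spot check can
+-- roll back every statement at once.
+-- begin;
+--   ... update statements ...
+-- commit;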
+update datasetfieldvalue set value='19.216667' where id=110308; +update datasetfieldvalue set value='19.216667' where id=110311; +update datasetfieldvalue set value='69.583333' where id=110316; +update datasetfieldvalue set value='69.583333' where id=110330; +update datasetfieldvalue set value='19.216667' where id=110356; +update datasetfieldvalue set value='69.583333' where id=110363; +update datasetfieldvalue set value='19.216667' where id=110365; +update datasetfieldvalue set value='69.583333' where id=110378; +update datasetfieldvalue set value='19.216667' where id=110388; +update datasetfieldvalue set value='69.583333' where id=110392; +update datasetfieldvalue set value='19.216667' where id=110410; +update datasetfieldvalue set value='19.216667' where id=110421; +update datasetfieldvalue set value='69.583333' where id=110441; +update datasetfieldvalue set value='19.216667' where id=110447; +update datasetfieldvalue set value='69.583333' where id=110449; +update datasetfieldvalue set value='19.216667' where id=110460; +update datasetfieldvalue set value='69.583333' where id=110463; +update datasetfieldvalue set value='19.216667' where id=110472; +update datasetfieldvalue set value='69.583333' where id=110485; +update datasetfieldvalue set value='19.216667' where id=110498; +update datasetfieldvalue set value='69.583333' where id=110499; +update datasetfieldvalue set value='19.216667' where id=110519; +update datasetfieldvalue set value='69.583333' where id=110528; +update datasetfieldvalue set value='69.583333' where id=110553; +update datasetfieldvalue set value='19.216667' where id=110558; +update datasetfieldvalue set value='69.583333' where id=110563; +update datasetfieldvalue set value='19.216667' where id=110572; +update datasetfieldvalue set value='69.583333' where id=110588; +update datasetfieldvalue set value='19.216667' where id=110600; +update datasetfieldvalue set value='19.216667' where id=110601; +update datasetfieldvalue set value='69.583333' where id=110619; +update datasetfieldvalue set value='19.216667' where id=110652; +update datasetfieldvalue set value='19.216667' where id=110653; +update datasetfieldvalue set value='69.583333' where id=110655; +update datasetfieldvalue set value='69.583333' where id=110661; +update datasetfieldvalue set value='19.216667' where id=110672; +update datasetfieldvalue set value='69.583333' where id=110681; +update datasetfieldvalue set value='69.583333' where id=110687; +update datasetfieldvalue set value='19.216667' where id=110704; +update datasetfieldvalue set value='69.583333' where id=110716; +update datasetfieldvalue set value='19.216667' where id=110721; +update datasetfieldvalue set value='19.216667' where id=110737; +update datasetfieldvalue set value='69.583333' where id=110746; +update datasetfieldvalue set value='19.216667' where id=110752; +update datasetfieldvalue set value='19.216667' where id=110764; +update datasetfieldvalue set value='69.583333' where id=110772; +update datasetfieldvalue set value='69.583333' where id=110788; +update datasetfieldvalue set value='19.216667' where id=110807; +update datasetfieldvalue set value='19.216667' where id=110811; +update datasetfieldvalue set value='69.583333' where id=110820; +update datasetfieldvalue set value='69.583333' where id=110828; +update datasetfieldvalue set value='69.583333' where id=110833; +update datasetfieldvalue set value='19.216667' where id=110844; +update datasetfieldvalue set value='19.216667' where id=110851; +update datasetfieldvalue set 
value='69.583333' where id=110853; +update datasetfieldvalue set value='69.583333' where id=110878; +update datasetfieldvalue set value='19.216667' where id=110880; +update datasetfieldvalue set value='69.583333' where id=110904; +update datasetfieldvalue set value='19.216667' where id=110913; +update datasetfieldvalue set value='69.583333' where id=110920; +update datasetfieldvalue set value='19.216667' where id=110924; +update datasetfieldvalue set value='19.216667' where id=110935; +update datasetfieldvalue set value='69.583333' where id=110949; +update datasetfieldvalue set value='19.216667' where id=110965; +update datasetfieldvalue set value='69.583333' where id=110966; +update datasetfieldvalue set value='69.583333' where id=110967; +update datasetfieldvalue set value='19.216667' where id=110999; +update datasetfieldvalue set value='69.583333' where id=111010; +update datasetfieldvalue set value='19.216667' where id=111034; +update datasetfieldvalue set value='69.583333' where id=111040; +update datasetfieldvalue set value='19.216667' where id=111041; +update datasetfieldvalue set value='69.583333' where id=111052; +update datasetfieldvalue set value='19.216667' where id=111061; +update datasetfieldvalue set value='19.216667' where id=111063; +update datasetfieldvalue set value='69.583333' where id=111075; +update datasetfieldvalue set value='69.583333' where id=111098; +update datasetfieldvalue set value='69.583333' where id=111101; +update datasetfieldvalue set value='19.216667' where id=111105; +update datasetfieldvalue set value='19.216667' where id=111123; +update datasetfieldvalue set value='69.583333' where id=111128; +update datasetfieldvalue set value='19.216667' where id=111133; +update datasetfieldvalue set value='19.216667' where id=111145; +update datasetfieldvalue set value='69.583333' where id=111156; +update datasetfieldvalue set value='19.216667' where id=111171; +update datasetfieldvalue set value='69.583333' where id=111185; +update datasetfieldvalue set value='69.583333' where id=111193; +update datasetfieldvalue set value='19.216667' where id=111207; +update datasetfieldvalue set value='19.216667' where id=111226; +update datasetfieldvalue set value='69.583333' where id=111237; +update datasetfieldvalue set value='19.216667' where id=111245; +update datasetfieldvalue set value='69.583333' where id=111249; +update datasetfieldvalue set value='19.216667' where id=111255; +update datasetfieldvalue set value='69.583333' where id=111274; +update datasetfieldvalue set value='69.583333' where id=111275; +update datasetfieldvalue set value='19.216667' where id=111277; +update datasetfieldvalue set value='69.583333' where id=111302; +update datasetfieldvalue set value='19.216667' where id=111310; +update datasetfieldvalue set value='69.583333' where id=111316; +update datasetfieldvalue set value='19.216667' where id=111317; +update datasetfieldvalue set value='69.583333' where id=111336; +update datasetfieldvalue set value='69.583333' where id=111341; +update datasetfieldvalue set value='19.216667' where id=111364; +update datasetfieldvalue set value='19.216667' where id=111376; +update datasetfieldvalue set value='69.583333' where id=111379; +update datasetfieldvalue set value='19.216667' where id=111407; +update datasetfieldvalue set value='19.216667' where id=111412; +update datasetfieldvalue set value='69.583333' where id=111415; +update datasetfieldvalue set value='19.216667' where id=111432; +update datasetfieldvalue set value='69.583333' where id=111452; +update 
datasetfieldvalue set value='69.583333' where id=111458; +update datasetfieldvalue set value='19.216667' where id=111460; +update datasetfieldvalue set value='69.583333' where id=111463; +update datasetfieldvalue set value='19.216667' where id=111467; +update datasetfieldvalue set value='19.216667' where id=111473; +update datasetfieldvalue set value='69.583333' where id=111494; +update datasetfieldvalue set value='69.583333' where id=111516; +update datasetfieldvalue set value='19.216667' where id=111518; +update datasetfieldvalue set value='69.583333' where id=111519; +update datasetfieldvalue set value='19.216667' where id=111540; +update datasetfieldvalue set value='19.216667' where id=111546; +update datasetfieldvalue set value='69.583333' where id=111554; +update datasetfieldvalue set value='69.583333' where id=111563; +update datasetfieldvalue set value='19.216667' where id=111568; +update datasetfieldvalue set value='19.216667' where id=111594; +update datasetfieldvalue set value='19.216667' where id=111608; +update datasetfieldvalue set value='69.583333' where id=111617; +update datasetfieldvalue set value='69.583333' where id=111628; +update datasetfieldvalue set value='19.216667' where id=111638; +update datasetfieldvalue set value='69.583333' where id=111640; +update datasetfieldvalue set value='19.216667' where id=111660; +update datasetfieldvalue set value='69.583333' where id=111669; +update datasetfieldvalue set value='19.216667' where id=111686; +update datasetfieldvalue set value='69.583333' where id=111692; +update datasetfieldvalue set value='69.583333' where id=111706; +update datasetfieldvalue set value='19.216667' where id=111708; +update datasetfieldvalue set value='69.583333' where id=111717; +update datasetfieldvalue set value='69.583333' where id=111719; +update datasetfieldvalue set value='19.216667' where id=111723; +update datasetfieldvalue set value='19.216667' where id=111755; +update datasetfieldvalue set value='69.583333' where id=111758; +update datasetfieldvalue set value='19.216667' where id=111780; +update datasetfieldvalue set value='69.583333' where id=111785; +update datasetfieldvalue set value='19.216667' where id=111790; +update datasetfieldvalue set value='69.583333' where id=111805; +update datasetfieldvalue set value='69.583333' where id=111807; +update datasetfieldvalue set value='19.216667' where id=111833; +update datasetfieldvalue set value='19.216667' where id=111839; +update datasetfieldvalue set value='69.583333' where id=111851; +update datasetfieldvalue set value='19.216667' where id=111864; +update datasetfieldvalue set value='19.216667' where id=111869; +update datasetfieldvalue set value='69.583333' where id=111870; +update datasetfieldvalue set value='69.583333' where id=111884; +update datasetfieldvalue set value='19.216667' where id=111910; +update datasetfieldvalue set value='69.583333' where id=111918; +update datasetfieldvalue set value='19.216667' where id=111922; +update datasetfieldvalue set value='69.583333' where id=111932; +update datasetfieldvalue set value='69.583333' where id=111934; +update datasetfieldvalue set value='19.216667' where id=111943; +update datasetfieldvalue set value='19.216667' where id=111953; +update datasetfieldvalue set value='69.583333' where id=111973; +update datasetfieldvalue set value='19.216667' where id=111983; +update datasetfieldvalue set value='69.583333' where id=111992; +update datasetfieldvalue set value='19.216667' where id=111994; +update datasetfieldvalue set value='69.583333' 
where id=112026; +update datasetfieldvalue set value='69.583333' where id=112027; +update datasetfieldvalue set value='19.216667' where id=112034; +update datasetfieldvalue set value='19.216667' where id=112048; +update datasetfieldvalue set value='69.583333' where id=112054; +update datasetfieldvalue set value='19.216667' where id=112057; +update datasetfieldvalue set value='19.216667' where id=112081; +update datasetfieldvalue set value='69.583333' where id=112086; +update datasetfieldvalue set value='19.216667' where id=112107; +update datasetfieldvalue set value='19.216667' where id=112109; +update datasetfieldvalue set value='69.583333' where id=112118; +update datasetfieldvalue set value='69.583333' where id=112123; +update datasetfieldvalue set value='19.216667' where id=112145; +update datasetfieldvalue set value='69.583333' where id=112155; +update datasetfieldvalue set value='19.216667' where id=112163; +update datasetfieldvalue set value='69.583333' where id=112170; +update datasetfieldvalue set value='19.216667' where id=112179; +update datasetfieldvalue set value='19.216667' where id=112184; +update datasetfieldvalue set value='69.583333' where id=112201; +update datasetfieldvalue set value='69.583333' where id=112210; +update datasetfieldvalue set value='19.216667' where id=112224; +update datasetfieldvalue set value='19.216667' where id=112234; +update datasetfieldvalue set value='69.583333' where id=112247; +update datasetfieldvalue set value='69.583333' where id=112249; +update datasetfieldvalue set value='69.583333' where id=112269; +update datasetfieldvalue set value='19.216667' where id=112277; +update datasetfieldvalue set value='69.583333' where id=112280; +update datasetfieldvalue set value='19.216667' where id=112282; +update datasetfieldvalue set value='19.216667' where id=112308; +update datasetfieldvalue set value='69.583333' where id=112312; +update datasetfieldvalue set value='19.216667' where id=112333; +update datasetfieldvalue set value='69.583333' where id=112336; +update datasetfieldvalue set value='69.583333' where id=112345; +update datasetfieldvalue set value='19.216667' where id=112351; +update datasetfieldvalue set value='69.583333' where id=112362; +update datasetfieldvalue set value='19.216667' where id=112374; +update datasetfieldvalue set value='19.216667' where id=112386; +update datasetfieldvalue set value='19.216667' where id=112394; +update datasetfieldvalue set value='69.583333' where id=112408; +update datasetfieldvalue set value='69.583333' where id=112415; +update datasetfieldvalue set value='19.216667' where id=112447; +update datasetfieldvalue set value='69.583333' where id=112450; +update datasetfieldvalue set value='19.216667' where id=112463; +update datasetfieldvalue set value='69.583333' where id=112467; +update datasetfieldvalue set value='19.216667' where id=112476; +update datasetfieldvalue set value='69.583333' where id=112477; +update datasetfieldvalue set value='19.216667' where id=112481; +update datasetfieldvalue set value='69.583333' where id=112503; +update datasetfieldvalue set value='69.583333' where id=112525; +update datasetfieldvalue set value='19.216667' where id=112528; +update datasetfieldvalue set value='19.216667' where id=112531; +update datasetfieldvalue set value='69.583333' where id=112539; +update datasetfieldvalue set value='69.583333' where id=112554; +update datasetfieldvalue set value='69.583333' where id=112574; +update datasetfieldvalue set value='19.216667' where id=112588; +update datasetfieldvalue 
set value='19.216667' where id=112592; +update datasetfieldvalue set value='69.583333' where id=112609; +update datasetfieldvalue set value='19.216667' where id=112624; +update datasetfieldvalue set value='69.583333' where id=112625; +update datasetfieldvalue set value='19.216667' where id=112634; +update datasetfieldvalue set value='69.583333' where id=112659; +update datasetfieldvalue set value='69.583333' where id=112660; +update datasetfieldvalue set value='19.216667' where id=112666; +update datasetfieldvalue set value='19.216667' where id=112678; +update datasetfieldvalue set value='19.216667' where id=112690; +update datasetfieldvalue set value='19.216667' where id=112704; +update datasetfieldvalue set value='69.583333' where id=112705; +update datasetfieldvalue set value='69.583333' where id=112718; +update datasetfieldvalue set value='19.216667' where id=112724; +update datasetfieldvalue set value='69.583333' where id=112725; +update datasetfieldvalue set value='69.583333' where id=112757; +update datasetfieldvalue set value='19.216667' where id=112758; +update datasetfieldvalue set value='69.583333' where id=112766; +update datasetfieldvalue set value='69.583333' where id=112774; +update datasetfieldvalue set value='19.216667' where id=112796; +update datasetfieldvalue set value='19.216667' where id=112802; +update datasetfieldvalue set value='19.216667' where id=112808; +update datasetfieldvalue set value='19.216667' where id=112830; +update datasetfieldvalue set value='69.583333' where id=112833; +update datasetfieldvalue set value='69.583333' where id=112834; +update datasetfieldvalue set value='19.216667' where id=112848; +update datasetfieldvalue set value='69.583333' where id=112850; +update datasetfieldvalue set value='69.583333' where id=112868; +update datasetfieldvalue set value='19.216667' where id=112874; +update datasetfieldvalue set value='19.216667' where id=112897; +update datasetfieldvalue set value='19.216667' where id=112904; +update datasetfieldvalue set value='69.583333' where id=112911; +update datasetfieldvalue set value='69.583333' where id=112913; +update datasetfieldvalue set value='69.583333' where id=112950; +update datasetfieldvalue set value='19.216667' where id=112953; +update datasetfieldvalue set value='19.216667' where id=112966; +update datasetfieldvalue set value='69.583333' where id=112970; +update datasetfieldvalue set value='19.216667' where id=112980; +update datasetfieldvalue set value='69.583333' where id=112984; +update datasetfieldvalue set value='69.583333' where id=112991; +update datasetfieldvalue set value='19.216667' where id=113000; +update datasetfieldvalue set value='19.216667' where id=113027; +update datasetfieldvalue set value='69.583333' where id=113043; +update datasetfieldvalue set value='19.216667' where id=113046; +update datasetfieldvalue set value='69.583333' where id=113048; +update datasetfieldvalue set value='69.583333' where id=113062; +update datasetfieldvalue set value='19.216667' where id=113070; +update datasetfieldvalue set value='19.216667' where id=113095; +update datasetfieldvalue set value='69.583333' where id=113096; +update datasetfieldvalue set value='69.583333' where id=113112; +update datasetfieldvalue set value='19.216667' where id=113119; +update datasetfieldvalue set value='19.216667' where id=113126; +update datasetfieldvalue set value='69.583333' where id=113132; +update datasetfieldvalue set value='19.216667' where id=113145; +update datasetfieldvalue set value='69.583333' where id=113149; 
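+-- Reminder (assumption based on standard Dataverse administration): values changed
+-- directly in the database are not reflected in search until the affected datasets
+-- are reindexed, e.g. via the admin API:
+--   curl http://localhost:8080/api/admin/index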
+update datasetfieldvalue set value='19.216667' where id=113177; +update datasetfieldvalue set value='69.583333' where id=113180; +update datasetfieldvalue set value='19.216667' where id=113184; +update datasetfieldvalue set value='19.216667' where id=113192; +update datasetfieldvalue set value='69.583333' where id=113195; +update datasetfieldvalue set value='69.583333' where id=113215; +update datasetfieldvalue set value='19.216667' where id=113245; +update datasetfieldvalue set value='69.583333' where id=113257; +update datasetfieldvalue set value='69.583333' where id=113261; +update datasetfieldvalue set value='19.216667' where id=113262; +update datasetfieldvalue set value='19.216667' where id=113269; +update datasetfieldvalue set value='69.583333' where id=113276; +update datasetfieldvalue set value='19.216667' where id=113293; +update datasetfieldvalue set value='69.583333' where id=113296; +update datasetfieldvalue set value='69.583333' where id=113315; +update datasetfieldvalue set value='19.216667' where id=113316; +update datasetfieldvalue set value='69.583333' where id=113322; +update datasetfieldvalue set value='19.216667' where id=113342; +update datasetfieldvalue set value='69.583333' where id=113354; +update datasetfieldvalue set value='19.216667' where id=113358; +update datasetfieldvalue set value='19.216667' where id=113363; +update datasetfieldvalue set value='69.583333' where id=113387; +update datasetfieldvalue set value='19.216667' where id=113396; +update datasetfieldvalue set value='69.583333' where id=113405; +update datasetfieldvalue set value='69.583333' where id=113425; +update datasetfieldvalue set value='19.216667' where id=113426; +update datasetfieldvalue set value='69.583333' where id=113439; +update datasetfieldvalue set value='19.216667' where id=113440; +update datasetfieldvalue set value='19.216667' where id=113454; +update datasetfieldvalue set value='69.583333' where id=113471; +update datasetfieldvalue set value='69.583333' where id=113479; +update datasetfieldvalue set value='19.216667' where id=113499; +update datasetfieldvalue set value='69.583333' where id=113508; +update datasetfieldvalue set value='19.216667' where id=113511; +update datasetfieldvalue set value='19.216667' where id=113521; +update datasetfieldvalue set value='69.583333' where id=113524; +update datasetfieldvalue set value='69.583333' where id=113536; +update datasetfieldvalue set value='19.216667' where id=113540; +update datasetfieldvalue set value='69.583333' where id=113562; +update datasetfieldvalue set value='19.216667' where id=113573; +update datasetfieldvalue set value='69.583333' where id=113592; +update datasetfieldvalue set value='19.216667' where id=113599; +update datasetfieldvalue set value='69.583333' where id=113607; +update datasetfieldvalue set value='19.216667' where id=113616; +update datasetfieldvalue set value='69.583333' where id=113618; +update datasetfieldvalue set value='19.216667' where id=113643; +update datasetfieldvalue set value='69.583333' where id=113646; +update datasetfieldvalue set value='19.216667' where id=113649; +update datasetfieldvalue set value='19.216667' where id=113661; +update datasetfieldvalue set value='69.583333' where id=113685; +update datasetfieldvalue set value='19.216667' where id=113710; +update datasetfieldvalue set value='69.583333' where id=113713; +update datasetfieldvalue set value='69.583333' where id=113714; +update datasetfieldvalue set value='19.216667' where id=113723; +update datasetfieldvalue set 
value='69.583333' where id=113732; +update datasetfieldvalue set value='19.216667' where id=113737; +update datasetfieldvalue set value='69.583333' where id=113739; +update datasetfieldvalue set value='19.216667' where id=113761; +update datasetfieldvalue set value='19.216667' where id=113777; +update datasetfieldvalue set value='19.216667' where id=113790; +update datasetfieldvalue set value='69.583333' where id=113794; +update datasetfieldvalue set value='69.583333' where id=113808; +update datasetfieldvalue set value='19.216667' where id=113817; +update datasetfieldvalue set value='69.583333' where id=113823; +update datasetfieldvalue set value='69.583333' where id=113828; +update datasetfieldvalue set value='19.216667' where id=113834; +update datasetfieldvalue set value='19.216667' where id=113867; +update datasetfieldvalue set value='69.583333' where id=113877; +update datasetfieldvalue set value='69.583333' where id=113890; +update datasetfieldvalue set value='19.216667' where id=113891; +update datasetfieldvalue set value='19.216667' where id=113908; +update datasetfieldvalue set value='19.216667' where id=113923; +update datasetfieldvalue set value='69.583333' where id=113930; +update datasetfieldvalue set value='69.583333' where id=113938; +update datasetfieldvalue set value='69.583333' where id=113941; +update datasetfieldvalue set value='69.583333' where id=113969; +update datasetfieldvalue set value='19.216667' where id=113974; +update datasetfieldvalue set value='19.216667' where id=113979; +update datasetfieldvalue set value='19.216667' where id=113999; +update datasetfieldvalue set value='19.216667' where id=114008; +update datasetfieldvalue set value='69.583333' where id=114014; +update datasetfieldvalue set value='69.583333' where id=114015; +update datasetfieldvalue set value='19.216667' where id=114034; +update datasetfieldvalue set value='69.583333' where id=114049; +update datasetfieldvalue set value='69.583333' where id=114053; +update datasetfieldvalue set value='19.216667' where id=114060; +update datasetfieldvalue set value='19.216667' where id=114075; +update datasetfieldvalue set value='69.583333' where id=114079; +update datasetfieldvalue set value='19.216667' where id=114090; +update datasetfieldvalue set value='69.583333' where id=114092; +update datasetfieldvalue set value='69.583333' where id=114110; +update datasetfieldvalue set value='19.216667' where id=114119; +update datasetfieldvalue set value='69.583333' where id=114123; +update datasetfieldvalue set value='19.216667' where id=114128; +update datasetfieldvalue set value='69.583333' where id=114156; +update datasetfieldvalue set value='69.583333' where id=114161; +update datasetfieldvalue set value='19.216667' where id=114166; +update datasetfieldvalue set value='19.216667' where id=114190; +update datasetfieldvalue set value='69.583333' where id=114194; +update datasetfieldvalue set value='19.216667' where id=114197; +update datasetfieldvalue set value='19.216667' where id=114222; +update datasetfieldvalue set value='69.583333' where id=114227; +update datasetfieldvalue set value='19.216667' where id=114234; +update datasetfieldvalue set value='69.583333' where id=114258; +update datasetfieldvalue set value='69.583333' where id=114260; +update datasetfieldvalue set value='19.216667' where id=114263; +update datasetfieldvalue set value='69.583333' where id=114288; +update datasetfieldvalue set value='69.583333' where id=114301; +update datasetfieldvalue set value='19.216667' where id=114303; +update 
datasetfieldvalue set value='19.216667' where id=114314; +update datasetfieldvalue set value='69.583333' where id=114320; +update datasetfieldvalue set value='19.216667' where id=114323; +update datasetfieldvalue set value='19.216667' where id=114330; +update datasetfieldvalue set value='69.583333' where id=114355; +update datasetfieldvalue set value='69.583333' where id=114372; +update datasetfieldvalue set value='69.583333' where id=114384; +update datasetfieldvalue set value='19.216667' where id=114386; +update datasetfieldvalue set value='19.216667' where id=114388; +update datasetfieldvalue set value='19.216667' where id=114406; +update datasetfieldvalue set value='69.583333' where id=114418; +update datasetfieldvalue set value='19.216667' where id=114423; +update datasetfieldvalue set value='69.583333' where id=114435; +update datasetfieldvalue set value='19.216667' where id=114454; +update datasetfieldvalue set value='19.216667' where id=114456; +update datasetfieldvalue set value='69.583333' where id=114473; +update datasetfieldvalue set value='69.583333' where id=114477; +update datasetfieldvalue set value='69.583333' where id=114507; +update datasetfieldvalue set value='19.216667' where id=114519; +update datasetfieldvalue set value='69.583333' where id=114520; +update datasetfieldvalue set value='19.216667' where id=114523; +update datasetfieldvalue set value='19.216667' where id=114540; +update datasetfieldvalue set value='69.583333' where id=114543; +update datasetfieldvalue set value='69.583333' where id=114559; +update datasetfieldvalue set value='19.216667' where id=114569; +update datasetfieldvalue set value='19.216667' where id=114575; +update datasetfieldvalue set value='69.583333' where id=114586; +update datasetfieldvalue set value='69.583333' where id=114595; +update datasetfieldvalue set value='19.216667' where id=114609; +update datasetfieldvalue set value='19.216667' where id=114625; +update datasetfieldvalue set value='69.583333' where id=114629; +update datasetfieldvalue set value='69.583333' where id=114637; +update datasetfieldvalue set value='19.216667' where id=114638; +update datasetfieldvalue set value='69.583333' where id=114662; +update datasetfieldvalue set value='69.583333' where id=114671; +update datasetfieldvalue set value='19.216667' where id=114673; +update datasetfieldvalue set value='19.216667' where id=114682; +update datasetfieldvalue set value='19.216667' where id=114701; +update datasetfieldvalue set value='69.583333' where id=114706; +update datasetfieldvalue set value='69.583333' where id=114725; +update datasetfieldvalue set value='19.216667' where id=114736; +update datasetfieldvalue set value='69.583333' where id=114740; +update datasetfieldvalue set value='69.583333' where id=114755; +update datasetfieldvalue set value='19.216667' where id=114761; +update datasetfieldvalue set value='19.216667' where id=114772; +update datasetfieldvalue set value='19.216667' where id=114789; +update datasetfieldvalue set value='69.583333' where id=114791; +update datasetfieldvalue set value='69.583333' where id=114795; +update datasetfieldvalue set value='19.216667' where id=114802; +update datasetfieldvalue set value='69.583333' where id=114823; +update datasetfieldvalue set value='19.216667' where id=114841; +update datasetfieldvalue set value='69.583333' where id=114843; +update datasetfieldvalue set value='19.216667' where id=114857; +update datasetfieldvalue set value='69.583333' where id=114891; +update datasetfieldvalue set value='69.583333' 
where id=114899; +update datasetfieldvalue set value='19.216667' where id=114901; +update datasetfieldvalue set value='19.216667' where id=114905; +update datasetfieldvalue set value='69.583333' where id=114926; +update datasetfieldvalue set value='19.216667' where id=114932; +update datasetfieldvalue set value='69.583333' where id=114937; +update datasetfieldvalue set value='19.216667' where id=114942; +update datasetfieldvalue set value='19.216667' where id=114949; +update datasetfieldvalue set value='69.583333' where id=114955; +update datasetfieldvalue set value='69.583333' where id=114967; +update datasetfieldvalue set value='19.216667' where id=114977; +update datasetfieldvalue set value='19.216667' where id=114991; +update datasetfieldvalue set value='69.583333' where id=115001; +update datasetfieldvalue set value='19.216667' where id=115009; +update datasetfieldvalue set value='69.583333' where id=115028; +update datasetfieldvalue set value='19.216667' where id=115034; +update datasetfieldvalue set value='19.216667' where id=115043; +update datasetfieldvalue set value='69.583333' where id=115047; +update datasetfieldvalue set value='69.583333' where id=115067; +update datasetfieldvalue set value='19.216667' where id=115075; +update datasetfieldvalue set value='19.216667' where id=115095; +update datasetfieldvalue set value='69.583333' where id=115099; +update datasetfieldvalue set value='69.583333' where id=115102; +update datasetfieldvalue set value='19.216667' where id=115120; +update datasetfieldvalue set value='69.583333' where id=115123; +update datasetfieldvalue set value='19.216667' where id=115137; +update datasetfieldvalue set value='69.583333' where id=115141; +update datasetfieldvalue set value='19.216667' where id=115162; +update datasetfieldvalue set value='19.216667' where id=115165; +update datasetfieldvalue set value='69.583333' where id=115174; +update datasetfieldvalue set value='69.583333' where id=115181; +update datasetfieldvalue set value='19.216667' where id=115204; +update datasetfieldvalue set value='19.216667' where id=115205; +update datasetfieldvalue set value='69.583333' where id=115206; +update datasetfieldvalue set value='69.583333' where id=115239; +update datasetfieldvalue set value='19.216667' where id=115251; +update datasetfieldvalue set value='19.216667' where id=115270; +update datasetfieldvalue set value='69.583333' where id=115272; +update datasetfieldvalue set value='69.583333' where id=115279; +update datasetfieldvalue set value='19.216667' where id=115287; +update datasetfieldvalue set value='69.583333' where id=115289; +update datasetfieldvalue set value='69.583333' where id=115291; +update datasetfieldvalue set value='19.216667' where id=115298; +update datasetfieldvalue set value='69.583333' where id=115326; +update datasetfieldvalue set value='19.216667' where id=115327; +update datasetfieldvalue set value='69.583333' where id=115348; +update datasetfieldvalue set value='19.216667' where id=115362; +update datasetfieldvalue set value='19.216667' where id=115386; +update datasetfieldvalue set value='69.583333' where id=115406; +update datasetfieldvalue set value='69.583333' where id=115407; +update datasetfieldvalue set value='19.216667' where id=115408; +update datasetfieldvalue set value='19.216667' where id=115410; +update datasetfieldvalue set value='69.583333' where id=115413; +update datasetfieldvalue set value='19.216667' where id=115417; +update datasetfieldvalue set value='69.583333' where id=115447; +update datasetfieldvalue 
set value='69.583333' where id=115452; +update datasetfieldvalue set value='19.216667' where id=115468; +update datasetfieldvalue set value='69.583333' where id=115474; +update datasetfieldvalue set value='19.216667' where id=115478; +update datasetfieldvalue set value='19.216667' where id=115498; +update datasetfieldvalue set value='19.216667' where id=115499; +update datasetfieldvalue set value='69.583333' where id=115529; +update datasetfieldvalue set value='69.583333' where id=115530; +update datasetfieldvalue set value='69.583333' where id=115538; +update datasetfieldvalue set value='19.216667' where id=115540; +update datasetfieldvalue set value='69.583333' where id=115546; +update datasetfieldvalue set value='19.216667' where id=115570; +update datasetfieldvalue set value='19.216667' where id=115579; +update datasetfieldvalue set value='19.216667' where id=115584; +update datasetfieldvalue set value='69.583333' where id=115600; +update datasetfieldvalue set value='69.583333' where id=115618; +update datasetfieldvalue set value='19.216667' where id=115623; +update datasetfieldvalue set value='19.216667' where id=115630; +update datasetfieldvalue set value='69.583333' where id=115655; +update datasetfieldvalue set value='69.583333' where id=115656; +update datasetfieldvalue set value='69.583333' where id=115682; +update datasetfieldvalue set value='69.583333' where id=115695; +update datasetfieldvalue set value='19.216667' where id=115699; +update datasetfieldvalue set value='19.216667' where id=115702; +update datasetfieldvalue set value='69.583333' where id=115705; +update datasetfieldvalue set value='69.583333' where id=115707; +update datasetfieldvalue set value='19.216667' where id=115727; +update datasetfieldvalue set value='19.216667' where id=115730; +update datasetfieldvalue set value='69.583333' where id=115747; +update datasetfieldvalue set value='69.583333' where id=115751; +update datasetfieldvalue set value='19.216667' where id=115764; +update datasetfieldvalue set value='19.216667' where id=115781; +update datasetfieldvalue set value='69.583333' where id=115791; +update datasetfieldvalue set value='19.216667' where id=115801; +update datasetfieldvalue set value='19.216667' where id=115822; +update datasetfieldvalue set value='69.583333' where id=115825; +update datasetfieldvalue set value='19.216667' where id=115830; +update datasetfieldvalue set value='69.583333' where id=115847; +update datasetfieldvalue set value='69.583333' where id=115849; +update datasetfieldvalue set value='19.216667' where id=115854; +update datasetfieldvalue set value='69.583333' where id=115878; +update datasetfieldvalue set value='69.583333' where id=115882; +update datasetfieldvalue set value='19.216667' where id=115894; +update datasetfieldvalue set value='19.216667' where id=115903; +update datasetfieldvalue set value='19.216667' where id=115933; +update datasetfieldvalue set value='69.583333' where id=115948; +update datasetfieldvalue set value='19.216667' where id=115953; +update datasetfieldvalue set value='69.583333' where id=115954; +update datasetfieldvalue set value='69.583333' where id=115958; +update datasetfieldvalue set value='69.583333' where id=115980; +update datasetfieldvalue set value='19.216667' where id=115981; +update datasetfieldvalue set value='19.216667' where id=115983; +update datasetfieldvalue set value='19.216667' where id=116000; +update datasetfieldvalue set value='69.583333' where id=116002; +update datasetfieldvalue set value='69.583333' where id=116004; 
+update datasetfieldvalue set value='19.216667' where id=116027; +update datasetfieldvalue set value='19.216667' where id=116052; +update datasetfieldvalue set value='19.216667' where id=116060; +update datasetfieldvalue set value='69.583333' where id=116069; +update datasetfieldvalue set value='69.583333' where id=116072; +update datasetfieldvalue set value='69.583333' where id=116084; +update datasetfieldvalue set value='19.216667' where id=116098; +update datasetfieldvalue set value='69.583333' where id=116111; +update datasetfieldvalue set value='19.216667' where id=116117; +update datasetfieldvalue set value='19.216667' where id=116138; +update datasetfieldvalue set value='69.583333' where id=116144; +update datasetfieldvalue set value='69.583333' where id=116162; +update datasetfieldvalue set value='19.216667' where id=116165; +update datasetfieldvalue set value='19.216667' where id=116168; +update datasetfieldvalue set value='19.216667' where id=116185; +update datasetfieldvalue set value='69.583333' where id=116188; +update datasetfieldvalue set value='69.583333' where id=116206; +update datasetfieldvalue set value='19.216667' where id=116215; +update datasetfieldvalue set value='69.583333' where id=116233; +update datasetfieldvalue set value='69.583333' where id=116235; +update datasetfieldvalue set value='19.216667' where id=116242; +update datasetfieldvalue set value='69.583333' where id=116278; +update datasetfieldvalue set value='69.583333' where id=116280; +update datasetfieldvalue set value='19.216667' where id=116286; +update datasetfieldvalue set value='19.216667' where id=116289; +update datasetfieldvalue set value='69.583333' where id=116297; +update datasetfieldvalue set value='69.583333' where id=116309; +update datasetfieldvalue set value='19.216667' where id=116312; +update datasetfieldvalue set value='19.216667' where id=116327; +update datasetfieldvalue set value='69.583333' where id=116337; +update datasetfieldvalue set value='19.216667' where id=116352; +update datasetfieldvalue set value='19.216667' where id=116370; +update datasetfieldvalue set value='69.583333' where id=116372; +update datasetfieldvalue set value='19.216667' where id=116391; +update datasetfieldvalue set value='69.583333' where id=116400; +update datasetfieldvalue set value='69.583333' where id=116407; +update datasetfieldvalue set value='19.216667' where id=116414; +update datasetfieldvalue set value='19.216667' where id=116422; +update datasetfieldvalue set value='69.583333' where id=116436; +update datasetfieldvalue set value='69.583333' where id=116448; +update datasetfieldvalue set value='19.216667' where id=116459; +update datasetfieldvalue set value='19.216667' where id=116460; +update datasetfieldvalue set value='69.583333' where id=116461; +update datasetfieldvalue set value='19.216667' where id=116464; +update datasetfieldvalue set value='69.583333' where id=116471; +update datasetfieldvalue set value='19.216667' where id=116506; +update datasetfieldvalue set value='19.216667' where id=116513; +update datasetfieldvalue set value='69.583333' where id=116521; +update datasetfieldvalue set value='69.583333' where id=116523; +update datasetfieldvalue set value='19.216667' where id=116561; +update datasetfieldvalue set value='19.216667' where id=116564; +update datasetfieldvalue set value='69.583333' where id=116565; +update datasetfieldvalue set value='69.583333' where id=116585; +update datasetfieldvalue set value='19.216667' where id=116593; +update datasetfieldvalue set 
value='19.216667' where id=116602; +update datasetfieldvalue set value='69.583333' where id=116619; +update datasetfieldvalue set value='69.583333' where id=116621; +update datasetfieldvalue set value='19.216667' where id=116651; +update datasetfieldvalue set value='69.583333' where id=116652; +update datasetfieldvalue set value='69.583333' where id=116658; +update datasetfieldvalue set value='19.216667' where id=116668; +update datasetfieldvalue set value='69.583333' where id=116680; +update datasetfieldvalue set value='69.583333' where id=116685; +update datasetfieldvalue set value='19.216667' where id=116701; +update datasetfieldvalue set value='19.216667' where id=116710; +update datasetfieldvalue set value='19.216667' where id=116717; +update datasetfieldvalue set value='19.216667' where id=116725; +update datasetfieldvalue set value='69.583333' where id=116734; +update datasetfieldvalue set value='69.583333' where id=116746; +update datasetfieldvalue set value='69.583333' where id=116761; +update datasetfieldvalue set value='19.216667' where id=116764; +update datasetfieldvalue set value='19.216667' where id=116791; +update datasetfieldvalue set value='69.583333' where id=116793; +update datasetfieldvalue set value='19.216667' where id=116809; +update datasetfieldvalue set value='69.583333' where id=116823; +update datasetfieldvalue set value='19.216667' where id=116824; +update datasetfieldvalue set value='69.583333' where id=116836; +update datasetfieldvalue set value='69.583333' where id=116864; +update datasetfieldvalue set value='69.583333' where id=116872; +update datasetfieldvalue set value='19.216667' where id=116875; +update datasetfieldvalue set value='19.216667' where id=116879; +update datasetfieldvalue set value='19.216667' where id=116880; +update datasetfieldvalue set value='19.216667' where id=116903; +update datasetfieldvalue set value='69.583333' where id=116908; +update datasetfieldvalue set value='69.583333' where id=116920; +update datasetfieldvalue set value='69.583333' where id=116926; +update datasetfieldvalue set value='69.583333' where id=116950; +update datasetfieldvalue set value='19.216667' where id=116956; +update datasetfieldvalue set value='19.216667' where id=116958; +update datasetfieldvalue set value='69.583333' where id=116969; +update datasetfieldvalue set value='19.216667' where id=116977; +update datasetfieldvalue set value='69.583333' where id=116992; +update datasetfieldvalue set value='19.216667' where id=117000; +update datasetfieldvalue set value='69.583333' where id=117012; +update datasetfieldvalue set value='19.216667' where id=117016; +update datasetfieldvalue set value='69.583333' where id=117030; +update datasetfieldvalue set value='19.216667' where id=117046; +update datasetfieldvalue set value='69.583333' where id=117052; +update datasetfieldvalue set value='19.216667' where id=117061; +update datasetfieldvalue set value='19.216667' where id=117080; +update datasetfieldvalue set value='69.583333' where id=117084; +update datasetfieldvalue set value='69.583333' where id=117093; +update datasetfieldvalue set value='19.216667' where id=117106; +update datasetfieldvalue set value='19.216667' where id=117109; +update datasetfieldvalue set value='69.583333' where id=117125; +update datasetfieldvalue set value='19.216667' where id=117136; +update datasetfieldvalue set value='69.583333' where id=117140; +update datasetfieldvalue set value='19.216667' where id=117151; +update datasetfieldvalue set value='69.583333' where id=117152; +update 
datasetfieldvalue set value='69.583333' where id=117191; +update datasetfieldvalue set value='19.216667' where id=117198; +update datasetfieldvalue set value='19.216667' where id=117200; +update datasetfieldvalue set value='69.583333' where id=117210; +update datasetfieldvalue set value='69.583333' where id=117216; +update datasetfieldvalue set value='19.216667' where id=117237; +update datasetfieldvalue set value='19.216667' where id=117246; +update datasetfieldvalue set value='69.583333' where id=117254; +update datasetfieldvalue set value='69.583333' where id=117264; +update datasetfieldvalue set value='19.216667' where id=117271; +update datasetfieldvalue set value='69.583333' where id=117278; +update datasetfieldvalue set value='19.216667' where id=117290; +update datasetfieldvalue set value='19.216667' where id=117302; +update datasetfieldvalue set value='19.216667' where id=117319; +update datasetfieldvalue set value='69.583333' where id=117331; +update datasetfieldvalue set value='69.583333' where id=117336; +update datasetfieldvalue set value='19.216667' where id=117351; +update datasetfieldvalue set value='19.216667' where id=117358; +update datasetfieldvalue set value='69.583333' where id=117374; +update datasetfieldvalue set value='69.583333' where id=117377; +update datasetfieldvalue set value='19.216667' where id=117401; +update datasetfieldvalue set value='69.583333' where id=117402; +update datasetfieldvalue set value='19.216667' where id=117417; +update datasetfieldvalue set value='69.583333' where id=117420; +update datasetfieldvalue set value='19.216667' where id=117456; +update datasetfieldvalue set value='69.583333' where id=117457; +update datasetfieldvalue set value='19.216667' where id=117463; +update datasetfieldvalue set value='69.583333' where id=117467; +update datasetfieldvalue set value='19.216667' where id=117470; +update datasetfieldvalue set value='69.583333' where id=117487; +update datasetfieldvalue set value='19.216667' where id=117488; +update datasetfieldvalue set value='69.583333' where id=117506; +update datasetfieldvalue set value='19.216667' where id=117514; +update datasetfieldvalue set value='69.583333' where id=117536; +update datasetfieldvalue set value='69.583333' where id=117540; +update datasetfieldvalue set value='19.216667' where id=117547; +update datasetfieldvalue set value='69.583333' where id=117553; +update datasetfieldvalue set value='69.583333' where id=117556; +update datasetfieldvalue set value='19.216667' where id=117575; +update datasetfieldvalue set value='19.216667' where id=117591; +update datasetfieldvalue set value='69.583333' where id=117597; +update datasetfieldvalue set value='69.583333' where id=117617; +update datasetfieldvalue set value='19.216667' where id=117619; +update datasetfieldvalue set value='19.216667' where id=117634; +update datasetfieldvalue set value='69.583333' where id=117648; +update datasetfieldvalue set value='19.216667' where id=117649; +update datasetfieldvalue set value='69.583333' where id=117650; +update datasetfieldvalue set value='19.216667' where id=117666; +update datasetfieldvalue set value='19.216667' where id=117695; +update datasetfieldvalue set value='69.583333' where id=117702; +update datasetfieldvalue set value='19.216667' where id=117711; +update datasetfieldvalue set value='69.583333' where id=117713; +update datasetfieldvalue set value='69.583333' where id=117723; +update datasetfieldvalue set value='19.216667' where id=117732; +update datasetfieldvalue set value='69.583333' 
where id=117734; +update datasetfieldvalue set value='19.216667' where id=117759; +update datasetfieldvalue set value='69.583333' where id=117773; +update datasetfieldvalue set value='19.216667' where id=117789; +update datasetfieldvalue set value='69.583333' where id=117792; +update datasetfieldvalue set value='19.216667' where id=117797; +update datasetfieldvalue set value='19.216667' where id=117812; +update datasetfieldvalue set value='19.216667' where id=117817; +update datasetfieldvalue set value='69.583333' where id=117831; +update datasetfieldvalue set value='69.583333' where id=117835; +update datasetfieldvalue set value='69.583333' where id=117855; +update datasetfieldvalue set value='19.216667' where id=117876; +update datasetfieldvalue set value='69.583333' where id=117879; +update datasetfieldvalue set value='19.216667' where id=117883; +update datasetfieldvalue set value='19.216667' where id=117891; +update datasetfieldvalue set value='69.583333' where id=117899; +update datasetfieldvalue set value='19.216667' where id=117914; +update datasetfieldvalue set value='69.583333' where id=117917; +update datasetfieldvalue set value='19.216667' where id=117935; +update datasetfieldvalue set value='69.583333' where id=117951; +update datasetfieldvalue set value='19.216667' where id=117953; +update datasetfieldvalue set value='69.583333' where id=117962; +update datasetfieldvalue set value='19.216667' where id=117977; +update datasetfieldvalue set value='69.583333' where id=117984; +update datasetfieldvalue set value='19.216667' where id=117990; +update datasetfieldvalue set value='69.583333' where id=117991; +update datasetfieldvalue set value='19.216667' where id=118016; +update datasetfieldvalue set value='19.216667' where id=118025; +update datasetfieldvalue set value='69.583333' where id=118038; +update datasetfieldvalue set value='69.583333' where id=118053; +update datasetfieldvalue set value='19.216667' where id=118063; +update datasetfieldvalue set value='69.583333' where id=118070; +update datasetfieldvalue set value='19.216667' where id=118075; +update datasetfieldvalue set value='69.583333' where id=118096; +update datasetfieldvalue set value='19.216667' where id=118102; +update datasetfieldvalue set value='19.216667' where id=118121; +update datasetfieldvalue set value='69.583333' where id=118129; +update datasetfieldvalue set value='69.583333' where id=118139; +update datasetfieldvalue set value='19.216667' where id=118141; +update datasetfieldvalue set value='19.216667' where id=118155; +update datasetfieldvalue set value='69.583333' where id=118161; +update datasetfieldvalue set value='69.583333' where id=118168; +update datasetfieldvalue set value='19.216667' where id=118189; +update datasetfieldvalue set value='69.583333' where id=118198; +update datasetfieldvalue set value='69.583333' where id=118201; +update datasetfieldvalue set value='19.216667' where id=118218; +update datasetfieldvalue set value='19.216667' where id=118231; +update datasetfieldvalue set value='69.583333' where id=118239; +update datasetfieldvalue set value='69.583333' where id=118241; +update datasetfieldvalue set value='19.216667' where id=118259; +update datasetfieldvalue set value='19.216667' where id=118266; +update datasetfieldvalue set value='19.216667' where id=118269; +update datasetfieldvalue set value='69.583333' where id=118276; +update datasetfieldvalue set value='69.583333' where id=118295; +update datasetfieldvalue set value='69.583333' where id=118308; +update datasetfieldvalue 
set value='19.216667' where id=118312; +update datasetfieldvalue set value='19.216667' where id=118340; +update datasetfieldvalue set value='69.583333' where id=118346; +update datasetfieldvalue set value='69.583333' where id=118352; +update datasetfieldvalue set value='19.216667' where id=118364; +update datasetfieldvalue set value='19.216667' where id=118367; +update datasetfieldvalue set value='69.583333' where id=118377; +update datasetfieldvalue set value='19.216667' where id=118410; +update datasetfieldvalue set value='19.216667' where id=118418; +update datasetfieldvalue set value='69.583333' where id=118421; +update datasetfieldvalue set value='69.583333' where id=118431; +update datasetfieldvalue set value='69.583333' where id=118440; +update datasetfieldvalue set value='19.216667' where id=118465; +update datasetfieldvalue set value='69.583333' where id=118466; +update datasetfieldvalue set value='19.216667' where id=118470; +update datasetfieldvalue set value='69.583333' where id=118490; +update datasetfieldvalue set value='19.216667' where id=118492; +update datasetfieldvalue set value='19.216667' where id=118504; +update datasetfieldvalue set value='69.583333' where id=118515; +update datasetfieldvalue set value='69.583333' where id=118519; +update datasetfieldvalue set value='69.583333' where id=118527; +update datasetfieldvalue set value='19.216667' where id=118534; +update datasetfieldvalue set value='19.216667' where id=118543; +update datasetfieldvalue set value='19.216667' where id=118562; +update datasetfieldvalue set value='69.583333' where id=118578; +update datasetfieldvalue set value='69.583333' where id=118582; +update datasetfieldvalue set value='19.216667' where id=118587; +update datasetfieldvalue set value='19.216667' where id=118624; +update datasetfieldvalue set value='69.583333' where id=118630; +update datasetfieldvalue set value='69.583333' where id=118637; +update datasetfieldvalue set value='19.216667' where id=118642; +update datasetfieldvalue set value='19.216667' where id=118661; +update datasetfieldvalue set value='19.216667' where id=118671; +update datasetfieldvalue set value='69.583333' where id=118677; +update datasetfieldvalue set value='69.583333' where id=118679; +update datasetfieldvalue set value='69.583333' where id=118691; +update datasetfieldvalue set value='69.583333' where id=118698; +update datasetfieldvalue set value='19.216667' where id=118699; +update datasetfieldvalue set value='19.216667' where id=118716; +update datasetfieldvalue set value='19.216667' where id=118729; +update datasetfieldvalue set value='69.583333' where id=118730; +update datasetfieldvalue set value='69.583333' where id=118733; +update datasetfieldvalue set value='19.216667' where id=118738; +update datasetfieldvalue set value='19.216667' where id=118771; +update datasetfieldvalue set value='69.583333' where id=118806; +update datasetfieldvalue set value='19.216667' where id=118807; +update datasetfieldvalue set value='69.583333' where id=118810; +update datasetfieldvalue set value='19.216667' where id=118812; +update datasetfieldvalue set value='69.583333' where id=118832; +update datasetfieldvalue set value='69.583333' where id=118838; +update datasetfieldvalue set value='19.216667' where id=118845; +update datasetfieldvalue set value='19.216667' where id=118856; +update datasetfieldvalue set value='69.583333' where id=118860; +update datasetfieldvalue set value='19.216667' where id=118861; +update datasetfieldvalue set value='69.583333' where id=118873; 
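These row-by-row coordinate fixes continue below. As a rough illustration (not part of the patch), candidate rows for this kind of correction can be listed with a query along the following lines; it assumes the standard Dataverse join path datasetfieldvalue -> datasetfield -> datasetfieldtype and that the geospatial bounding-box field names contain "longitude" or "latitude", both of which should be verified against the running schema:

#!/bin/bash
# Illustrative sketch only: list bounding-box values that are not plain decimal
# numbers, i.e. candidates for manual corrections like the UPDATE statements in
# this migration. The join path and the "longitude"/"latitude" name filter are
# assumptions about the stock Dataverse schema; check them before relying on this.
docker exec -it postgres psql -U dataverse dataverse -c "
SELECT v.id, t.name, v.value
FROM datasetfieldvalue v
JOIN datasetfield f ON f.id = v.datasetfield_id
JOIN datasetfieldtype t ON t.id = f.datasetfieldtype_id
WHERE (t.name ILIKE '%longitude%' OR t.name ILIKE '%latitude%')
  AND NOT v.value ~ '^-?[0-9]+(\.[0-9]+)?\$';"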
+update datasetfieldvalue set value='19.216667' where id=118896; +update datasetfieldvalue set value='69.583333' where id=118918; +update datasetfieldvalue set value='19.216667' where id=118923; +update datasetfieldvalue set value='69.583333' where id=118933; +update datasetfieldvalue set value='69.583333' where id=118962; +update datasetfieldvalue set value='19.216667' where id=118964; +update datasetfieldvalue set value='19.216667' where id=118975; +update datasetfieldvalue set value='69.583333' where id=118978; +update datasetfieldvalue set value='69.583333' where id=118988; +update datasetfieldvalue set value='19.216667' where id=118991; +update datasetfieldvalue set value='69.583333' where id=119011; +update datasetfieldvalue set value='19.216667' where id=119016; +update datasetfieldvalue set value='19.216667' where id=119030; +update datasetfieldvalue set value='69.583333' where id=119041; +update datasetfieldvalue set value='69.583333' where id=119043; +update datasetfieldvalue set value='19.216667' where id=119063; +update datasetfieldvalue set value='69.583333' where id=119073; +update datasetfieldvalue set value='19.216667' where id=119076; +update datasetfieldvalue set value='69.583333' where id=119078; +update datasetfieldvalue set value='19.216667' where id=119083; +update datasetfieldvalue set value='69.583333' where id=119112; +update datasetfieldvalue set value='19.216667' where id=119126; +update datasetfieldvalue set value='69.583333' where id=119133; +update datasetfieldvalue set value='19.216667' where id=119145; +update datasetfieldvalue set value='69.583333' where id=119156; +update datasetfieldvalue set value='19.216667' where id=119165; +update datasetfieldvalue set value='69.583333' where id=119170; +update datasetfieldvalue set value='19.216667' where id=119179; +update datasetfieldvalue set value='19.216667' where id=119203; +update datasetfieldvalue set value='19.216667' where id=119205; +update datasetfieldvalue set value='69.583333' where id=119211; +update datasetfieldvalue set value='69.583333' where id=119214; +update datasetfieldvalue set value='69.583333' where id=119232; +update datasetfieldvalue set value='19.216667' where id=119237; +update datasetfieldvalue set value='19.216667' where id=119240; +update datasetfieldvalue set value='69.583333' where id=119253; +update datasetfieldvalue set value='69.583333' where id=119283; +update datasetfieldvalue set value='19.216667' where id=119309; +update datasetfieldvalue set value='69.583333' where id=119310; +update datasetfieldvalue set value='19.216667' where id=119311; +update datasetfieldvalue set value='19.216667' where id=119323; +update datasetfieldvalue set value='19.216667' where id=119332; +update datasetfieldvalue set value='69.583333' where id=119334; +update datasetfieldvalue set value='69.583333' where id=119348; +update datasetfieldvalue set value='69.583333' where id=119363; +update datasetfieldvalue set value='19.216667' where id=119382; +update datasetfieldvalue set value='19.216667' where id=119393; +update datasetfieldvalue set value='69.583333' where id=119397; +update datasetfieldvalue set value='69.583333' where id=119416; +update datasetfieldvalue set value='19.216667' where id=119419; +update datasetfieldvalue set value='19.216667' where id=119421; +update datasetfieldvalue set value='69.583333' where id=119428; +update datasetfieldvalue set value='19.216667' where id=119458; +update datasetfieldvalue set value='19.216667' where id=119473; +update datasetfieldvalue set 
value='69.583333' where id=119476; +update datasetfieldvalue set value='69.583333' where id=119482; +update datasetfieldvalue set value='69.583333' where id=119494; +update datasetfieldvalue set value='69.583333' where id=119500; +update datasetfieldvalue set value='19.216667' where id=119514; +update datasetfieldvalue set value='19.216667' where id=119515; +update datasetfieldvalue set value='82.86' where id=122055; +update datasetfieldvalue set value='6.36' where id=122072; +update datasetfieldvalue set value='82.92' where id=122078; +update datasetfieldvalue set value='6.12' where id=122094; +update datasetfieldvalue set value='110.13892' where id=124620; +update datasetfieldvalue set value='38.93792' where id=124633; +update datasetfieldvalue set value='10.7833' where id=126264; +update datasetfieldvalue set value='59.6667' where id=126266; +update datasetfieldvalue set value='19.525' where id=126584; +update datasetfieldvalue set value='75.919' where id=126585; +update datasetfieldvalue set value='19.745' where id=126586; +update datasetfieldvalue set value='19.175' where id=126590; +update datasetfieldvalue set value='75.918' where id=126591; +update datasetfieldvalue set value='76.400' where id=126596; +update datasetfieldvalue set value='76.069' where id=126600; +update datasetfieldvalue set value='21.861' where id=126603; +update datasetfieldvalue set value='78.79730' where id=127757; +update datasetfieldvalue set value='79.95' where id=129615; +update datasetfieldvalue set value='5.45' where id=129621; +update datasetfieldvalue set value='25.4698775' where id=131167; +update datasetfieldvalue set value='24.640115' where id=131171; +update datasetfieldvalue set value='69.5056022' where id=131172; +update datasetfieldvalue set value='70.7038205' where id=131175; +update datasetfieldvalue set value='64.00' where id=131810; +update datasetfieldvalue set value='69.00' where id=131811; +update datasetfieldvalue set value='60.2' where id=132959; +update datasetfieldvalue set value='60.5' where id=132960; +update datasetfieldvalue set value='78.5' where id=133216; +update datasetfieldvalue set value='78.3' where id=133223; +update datasetfieldvalue set value='5.5' where id=133226; +update datasetfieldvalue set value='23.916667' where id=134095; +update datasetfieldvalue set value='31' where id=134106; +update datasetfieldvalue set value='69.75' where id=134118; +update datasetfieldvalue set value='77' where id=134120; +update datasetfieldvalue set value='-82.0' where id=37242; +update datasetfieldvalue set value='-79.0' where id=37234; +update datasetfieldvalue set value='-2.0' where id=37232; +update datasetfieldvalue set value='-4.0' where id=37252; +update datasetfieldvalue set value='-82.0' where id=33052; +update datasetfieldvalue set value='-79.0' where id=33054; +update datasetfieldvalue set value='-2.0' where id=33055; +update datasetfieldvalue set value='-4.0' where id=33053; +update datasetfieldvalue set value='9.333333' where id=23944; +update datasetfieldvalue set value='9.666667' where id=23936; +update datasetfieldvalue set value='78.666667' where id=23900; +update datasetfieldvalue set value='78.5' where id=23940; +update datasetfieldvalue set value='9.333333' where id=22494; +update datasetfieldvalue set value='9.666667' where id=22496; +update datasetfieldvalue set value='78.666667' where id=22495; +update datasetfieldvalue set value='78.5' where id=22497; +update datasetfieldvalue set value='-27' where id=135009; +update datasetfieldvalue set value='-26' where 
id=135023; +update datasetfieldvalue set value='37' where id=135021; +update datasetfieldvalue set value='35' where id=135012; diff --git a/distros/dataverse.no/migration/cleanup-database.sql b/distros/dataverse.no/migration/cleanup-database.sql new file mode 100644 index 0000000..16c9490 --- /dev/null +++ b/distros/dataverse.no/migration/cleanup-database.sql @@ -0,0 +1,6 @@ + +alter table authenticateduser drop constraint authenticateduser_email_key; +drop index index_authenticateduser_lower_email; +Update authenticateduser set email='noreply@uit.no'; + +update datasetfieldvalue set value='noreply@uit.no' where datasetfield_id in (select id from datasetfield where datasetfieldtype_id=15); diff --git a/distros/dataverse.no/migration/create-backup-db.sh b/distros/dataverse.no/migration/create-backup-db.sh new file mode 100644 index 0000000..4646ce8 --- /dev/null +++ b/distros/dataverse.no/migration/create-backup-db.sh @@ -0,0 +1,11 @@ +#!/bin/bash +docker exec -it postgres bash -c "pg_dump -U dataverse dataverse > /var/lib/postgresql/data/dataverse.dump" +gzip -c /extdisk/database-data-demo/dataverse.dump > "/extdisk/database-data-demo/dataverse$(date +'%Y%m%d').dump.gz" +docker exec -it postgres bash -c "createdb -U dataverse dataverse-tmp" +docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/dataverse.dump" +docker cp ./cleanup-database.sql postgres:/var/lib/postgresql/data/ +docker exec -it postgres bash -c "psql -U dataverse dataverse-tmp -f /var/lib/postgresql/data/cleanup-database.sql" +docker exec -it postgres bash -c "pg_dump -U dataverse dataverse-tmp > /var/lib/postgresql/data/dataverseCL.dump" +docker exec -it postgres bash -c "dropdb -U dataverse dataverse-tmp" +gzip -c /extdisk/database-data-demo/dataverseCL.dump > "/extdisk/database-data-demo/dataverseCL$(date +'%Y%m%d').dump.gz" + diff --git a/distros/dataverse.no/migration/createDBfreomDump.sql b/distros/dataverse.no/migration/createDBfreomDump.sql new file mode 100644 index 0000000..3f50c1d --- /dev/null +++ b/distros/dataverse.no/migration/createDBfreomDump.sql @@ -0,0 +1,3 @@ +dropdb -U dataverse dataverse; +createdb -U dataverse dataverse; +psql -U dataverse dataverse -f /mnttmp/opendata*.sql diff --git a/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql new file mode 100644 index 0000000..a0b6ac8 --- /dev/null +++ b/distros/dataverse.no/migration/dvno_geolocation_cleaning20240322.sql @@ -0,0 +1,6 @@ +update datasetfieldvalue set value='7.667742' where id=210693; +update datasetfieldvalue set value='7.667742' where id=210679; +update datasetfieldvalue set value='64.642997' where id=210678; +update datasetfieldvalue set value='64.642997' where id=210692; +update datasetfieldvalue set value='71.5' where id=206388; +update datasetfieldvalue set value='69' where id=206399 diff --git a/distros/dataverse.no/migration/replaceDatabase.sh b/distros/dataverse.no/migration/replaceDatabase.sh new file mode 100644 index 0000000..3da3cd4 --- /dev/null +++ b/distros/dataverse.no/migration/replaceDatabase.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +docker cp ./replaceDatabase.sql postgres:/tmp + +docker exec -it postgres bash -c "psql -U dataverse dataverse -f /tmp/replaceDatabase.sql" + diff --git a/distros/dataverse.no/migration/replaceDatabase.sql b/distros/dataverse.no/migration/replaceDatabase.sql new file mode 100644 index 0000000..6d0d252 --- /dev/null +++ 
b/distros/dataverse.no/migration/replaceDatabase.sql @@ -0,0 +1,8 @@ +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE dtype='Dataset' and storageidentifier like '%file://%'; + +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); +UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'local://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%local://%'); + + + + diff --git a/distros/dataverse.no/modification/Bundle.properties b/distros/dataverse.no/modification/Bundle.properties new file mode 100644 index 0000000..af02853 --- /dev/null +++ b/distros/dataverse.no/modification/Bundle.properties @@ -0,0 +1,2698 @@ +dataverse=Dataverse +newDataverse=New Dataverse +hostDataverse=Host Dataverse +dataverses=Dataverses +passwd=Password +dataset=Dataset +datasets=Datasets +newDataset=New Dataset +files=Files +file=File +public=Public +restricted=Restricted +restrictedaccess=Restricted with Access Granted +find=Find +search=Search +language=Language +created=Created +deposited=Deposited +published=Published +unpublished=Unpublished +cancel=Cancel +ok=OK +saveChanges=Save Changes +acceptTerms=Accept +submit=Submit +signup=Sign Up +login=Log In +email=Email +account=Account +requiredField=Required field +new=New +identifier=Identifier +description=Description +subject=Subject +close=Close +preview=Preview +continue=Continue +name=Name +institution=Institution +position=Position +affiliation=Affiliation +storage=Storage +createDataverse=Create Dataverse +remove=Remove +done=Done +editor=Contributor +manager=Manager +curator=Curator +explore=Explore +download=Download +downloadOriginal=Original Format +downloadArchival=Archival Format (.tab) +deaccession=Deaccession +share=Share +link=Link +linked=Linked +harvested=Harvested +apply=Apply +add=Add +delete=Delete +copyClipboard=Copy to Clipboard +truncateMoreBtn=Read full {0} [+] +truncateMoreTip=Click to read the full {0}. +truncateLessBtn=Collapse {0} [+] +truncateLessTip=Click to collapse the {0}. +yes=Yes +no=No +previous=Previous +next=Next +first=First +last=Last +more=More... +less=Less... +select=Select... +selectedFiles=Selected Files +htmlAllowedTitle=Allowed HTML Tags +htmlAllowedMsg=This field supports only certain HTML tags. +htmlAllowedTags=<a>, <b>, <blockquote>, <br>, <code>, <del>, <dd>, <dl>, <dt>, <em>, <hr>, <h1>-<h3>, <i>, <img>, <kbd>, <li>, <ol>, <p>, <pre>, <s>, <sup>, <sub>, <strong>, <strike>, <u>, <ul> +conditionalRequiredMsg=One or more of these fields may become required if you add to one or more of these optional fields. +conditionalRequiredMsg.tooltip=This field will become required if you choose to enter values in one or more of these optional fields. +toggleNavigation=Toggle navigation +defaultBody=Default Body +loading=Loading... +filter=Filter +to=to +of=of +alt.logo={0} logo +alt.homepage={0} homepage + +# dataverse_header.xhtml +header.noscript=Please enable JavaScript in your browser. It is required to use most of the features of Dataverse. +header.status.header=Status +header.search.title=Search all dataverses... 
+header.about=About +header.support=Support +header.guides=Guides +header.guides.user=User Guide +header.guides.developer=Developer Guide +header.guides.installation=Installation Guide +header.guides.api=API Guide +header.guides.admin=Admin Guide +header.signUp=Sign Up +header.logOut=Log Out +header.accountInfo=Account Information +header.dashboard=Dashboard +header.user.selectTab.dataRelated=My Data +header.user.selectTab.notifications=Notifications +header.user.selectTab.accountInfo=Account Information +header.user.selectTab.groupsAndRoles=Groups + Roles +header.user.selectTab.apiToken=API Token + +# dataverse_template.xhtml +head.meta.description=The Dataverse Project is an open source software application to share, cite and archive data. Dataverse provides a robust infrastructure for data stewards to host and archive data, while offering researchers an easy way to share and get credit for their data. +body.skip=Skip to main content + +# dataverse_footer.xhtml +footer.copyright=Copyright © {0} +footer.widget.datastored=Data is stored at {0}. +footer.widget.login=Log in to +footer.privacyPolicy=Privacy Policy +footer.poweredby=Powered by +footer.dataverseProject=The Dataverse Project + +# messages.xhtml +messages.error=Error +messages.success=Success! +messages.info=Info +messages.validation=Validation Error +messages.validation.msg=Required fields were missed or there was a validation error. Please scroll down to see details. + +# contactFormFragment.xhtml +contact.header=Contact {0} +contact.dataverse.header=Email Dataverse Contact +contact.dataset.header=Email Dataset Contact +contact.to=To +contact.support=Support +contact.from=From +contact.from.required=User email is required. +contact.from.invalid=Email is invalid. +contact.subject=Subject +contact.subject.required=Subject is required. +contact.subject.selectTab.top=Select subject... +contact.subject.selectTab.support=Support Question +contact.subject.selectTab.dataIssue=Data Issue +contact.msg=Message +contact.msg.required=Message text is required. +contact.send=Send Message +contact.question=Please fill this out to prove you are not a robot. +contact.sum.title=Human Access Validation Answer +contact.sum.required=Value is required. +contact.sum.invalid=Incorrect sum, please try again. +contact.sum.converterMessage=Please enter a number. +contact.contact=Contact +# Bundle file editors, please note that these "contact.context" messages are used in tests. +contact.context.subject.dvobject={0} contact: {1} +contact.context.subject.support={0} support request: {1} +contact.context.dataverse.intro={0}You have just been sent the following message from {1} via the {2} hosted dataverse named "{3}":\n\n---\n\n +contact.context.dataverse.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataverse {2}/dataverse/{3}\n\nYou received this email because you have been listed as a contact for the dataverse. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. 
+contact.context.dataverse.noContact=There is no contact address on file for this dataverse so this message is being sent to the system address.\n\n +contact.context.dataset.greeting.helloFirstLast=Hello {0} {1}, +contact.context.dataset.greeting.organization=Attention Dataset Contact: +contact.context.dataset.intro={0}\n\nYou have just been sent the following message from {1} via the {2} hosted dataset titled "{3}" ({4}):\n\n---\n\n +contact.context.dataset.ending=\n\n---\n\n{0}\n{1}\n\nGo to dataset {2}/dataset.xhtml?persistentId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. +contact.context.dataset.noContact=There is no contact address on file for this dataset so this message is being sent to the system address.\n\n---\n\n +contact.context.file.intro={0}\n\nYou have just been sent the following message from {1} via the {2} hosted file named "{3}" from the dataset titled "{4}" ({5}):\n\n---\n\n +contact.context.file.ending=\n\n---\n\n{0}\n{1}\n\nGo to file {2}/file.xhtml?fileId={3}\n\nYou received this email because you have been listed as a contact for the dataset. If you believe this was an error, please contact {4} at {5}. To respond directly to the individual who sent the message, simply reply to this email. +contact.context.support.intro={0},\n\nThe following message was sent from {1}.\n\n---\n\n +contact.context.support.ending=\n\n---\n\nMessage sent from Support contact form. + +# dataverseuser.xhtml +account.info=Account Information +account.edit=Edit Account +account.apiToken=API Token +account.emailvalidation.header=Email Validation +account.emailvalidation.token.exists=A verification email has been sent to {0}. Please check your inbox. +user.isShibUser=Account information cannot be edited when logged in through an institutional account. +user.helpShibUserMigrateOffShibBeforeLink=Leaving your institution? Please contact +user.helpShibUserMigrateOffShibAfterLink=for assistance. +user.helpOAuthBeforeLink=Your Dataverse account uses {0} for login. If you are interested in changing login methods, please contact +user.helpOAuthAfterLink=for assistance. +user.lostPasswdTip=If you have lost or forgotten your password, please enter your username or email address below and click Submit. We will send you an e-mail with your new password. +user.dataRelatedToMe=My Data +wasCreatedIn=, was created in +wasCreatedTo=, was added to +wasSubmittedForReview=, was submitted for review to be published in +wasPublished=, was published in +wasReturnedByReviewer=, was returned by the curator of +# TODO: Confirm that "toReview" can be deleted. +toReview=Don't forget to publish it or send it back to the contributor! +# Bundle file editors, please note that "notification.welcome" is used in a unit test. +notification.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check our user guides at http://site.uit.no/dataverseno/deposit/ or contact DataverseNO support for assistance: http://site.uit.no/dataverseno/support/. +notification.demoSite=Demo Site +notification.requestFileAccess=File access requested for dataset: {0} was made by {1} ({2}). +notification.grantFileAccess=Access granted for files in dataset: {0}. +notification.rejectFileAccess=Access rejected for requested files in dataset: {0}. +notification.createDataverse={0} was created in {1} . 
To learn more about what you can do with your dataverse, check out the {2}. +notification.dataverse.management.title=Dataverse Management - Dataverse User Guide +notification.createDataset={0} was created in {1}. To learn more about what you can do with a dataset, check out the {2}. +notification.dataset.management.title=Dataset Management - Dataset User Guide +notification.wasSubmittedForReview={0} was submitted for review to be published in {1}. Don''t forget to publish it or send it back to the contributor, {2} ({3})\! +notification.wasReturnedByReviewer={0} was returned by the curator of {1}. +notification.wasPublished={0} was published in {1}. +notification.publishFailedPidReg={0} in {1} could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. +notification.workflowFailed=An external workflow run on {0} in {1} has failed. Check your email and/or view the Dataset page which may have additional details. Contact support if this continues to happen. +notification.workflowSucceeded=An external workflow run on {0} in {1} has succeeded. Check your email and/or view the Dataset page which may have additional details. + +notification.ingestCompleted=Dataset {1} ingest has successfully finished. +notification.ingestCompletedWithErrors=Dataset {1} ingest has finished with errors. +notification.generic.objectDeleted=The dataverse, dataset, or file for this notification has been deleted. +notification.access.granted.dataverse=You have been granted the {0} role for {1}. +notification.access.granted.dataset=You have been granted the {0} role for {1}. +notification.access.granted.datafile=You have been granted the {0} role for file in {1}. +notification.access.granted.fileDownloader.additionalDataverse={0} You now have access to all published restricted and unrestricted files in this dataverse. +notification.access.granted.fileDownloader.additionalDataset={0} You now have access to all published restricted and unrestricted files in this dataset. +notification.access.revoked.dataverse=You have been removed from a role in {0}. +notification.access.revoked.dataset=You have been removed from a role in {0}. +notification.access.revoked.datafile=You have been removed from a role in {0}. +notification.checksumfail=One or more files in your upload failed checksum validation for dataset {1}. Please re-run the upload script. If the problem persists, please contact support. +notification.ingest.completed=Dataset {2} ingest process has successfully finished. Ingested files:{3} +notification.ingest.completedwitherrors=Dataset {2} ingest process has finished with errors. Ingested files:{3}
+notification.mail.import.filesystem=Dataset {2} ({0}/dataset.xhtml?persistentId={1}) has been successfully uploaded and verified. +notification.import.filesystem=Dataset {1} has been successfully uploaded and verified. +notification.import.checksum={1}, dataset had file checksums added via a batch job. +removeNotification=Remove Notification +groupAndRoles.manageTips=Here is where you can access and manage all the groups you belong to, and the roles you have been assigned. +user.message.signup.label=Create Account +user.message.signup.tip=Why have a Dataverse account? To create your own dataverse and customize it, add datasets, or request access to restricted files. +user.signup.otherLogInOptions.tip=You can also create a Dataverse account with one of our other log in options. +user.username.illegal.tip=Between 2-60 characters, and can use "a-z", "0-9", "_" for your username. +user.username=Username +user.username.taken=This username is already taken. +user.username.invalid=This username contains an invalid character or is outside the length requirement (2-60 characters). +user.username.valid=Create a valid username of 2 to 60 characters in length containing letters (a-Z), numbers (0-9), dashes (-), underscores (_), and periods (.). +user.noPasswd=No Password +user.currentPasswd=Current Password +user.currentPasswd.tip=Please enter the current password for this account. +user.passwd.illegal.tip=Password needs to be at least 6 characters, include one letter and one number, and special characters may be used. +user.rePasswd=Retype Password +user.rePasswd.tip=Please retype the password you entered above. +user.firstName=Given Name +user.firstName.tip=The first name or name you would like to use for this account. +user.lastName=Family Name +user.lastName.tip=The last name you would like to use for this account. +user.email.tip=A valid email address you have access to in order to be contacted. +user.email.taken=This email address is already taken. +user.affiliation.tip=The organization with which you are affiliated. +user.position=Position +user.position.tip=Your role or title at the organization you are affiliated with; such as staff, faculty, student, etc. +user.acccountterms=General Terms of Use +user.acccountterms.tip=The terms and conditions for using the application and services. +user.acccountterms.required=Please check the box to indicate your acceptance of the General Terms of Use. +user.acccountterms.iagree=I have read and accept the Dataverse General Terms of Use as outlined above. +user.createBtn=Create Account +user.updatePassword.welcome=Welcome to Dataverse {0} +user.updatePassword.warning=With the release of our new Dataverse 4.0 upgrade, the password requirements and General Terms of Use have updated. As this is the first time you are using Dataverse since the update, you need to create a new password and agree to the new General Terms of Use. +user.updatePassword.password={0} +user.password=Password +user.newPassword=New Password +authenticationProvidersAvailable.tip={0}There are no active authentication providers{1}If you are a system administrator, please enable one using the API.{2}If you are not a system administrator, please contact the one for your institution. 
+ +passwdVal.passwdReq.title=Your password must contain: +passwdVal.passwdReq.goodStrength=passwords of at least {0} characters are exempt from all other requirements +passwdVal.passwdReq.lengthReq=At least {0} characters +passwdVal.passwdReq.characteristicsReq=At least 1 character from {0} of the following types: +passwdVal.passwdReq.notInclude=It may not include: +passwdVal.passwdReq.consecutiveDigits=More than {0} numbers in a row +passwdVal.passwdReq.dictionaryWords=Dictionary words +passwdVal.passwdReq.unknownPasswordRule=Unknown, contact your administrator +#printf syntax used to pass to passay library +passwdVal.expireRule.errorCode=EXPIRED +passwdVal.expireRule.errorMsg=The password is over %1$s days old and has expired. +passwdVal.goodStrengthRule.errorMsg=Note: passwords are always valid with a %1$s or more character length regardless. +passwdVal.goodStrengthRule.errorCode=NO_GOODSTRENGTH +passwdVal.passwdReset.resetLinkTitle=Password Reset Link +passwdVal.passwdReset.resetLinkDesc=Your password reset link is not valid +passwdVal.passwdReset.resetInitiated=Password Reset Initiated +passwdVal.passwdReset.valBlankLog=new password is blank +passwdVal.passwdReset.valFacesError=Password Error +passwdVal.passwdReset.valFacesErrorDesc=Please enter a new password for your account. +passwdVal.passwdValBean.warnDictionaryRead=Dictionary was set, but none was read in. +passwdVal.passwdValBean.warnDictionaryObj=PwDictionaries not set and no default password file found: +passwdVal.passwdValBean.warnSetStrength=The PwGoodStrength {0} value competes with the PwMinLength value of {1} and is added to {2} + +# passwordreset.xhtml +pageTitle.passwdReset.pre=Account Password Reset +passwdReset.token=token : +passwdReset.userLookedUp=user looked up : +passwdReset.emailSubmitted=email submitted : +passwdReset.details={0} Password Reset{1} - To initiate the password reset process, please provide your email address. +passwdReset.submitRequest=Submit Password Request +passwdReset.successSubmit.tip=If this email is associated with an account, then an email will be sent with further instructions to {0}. +passwdReset.debug=DEBUG +passwdReset.resetUrl=The reset URL is +passwdReset.noEmail.tip=No email was actually sent because a user could not be found using the provided email address {0} but we don't mention this because we don't want malicious users to use the form to determine if there is an account associated with an email address. +passwdReset.illegalLink.tip=Your password reset link is not valid. If you need to reset your password, {0}click here{1} in order to request that your password be reset again. +passwdReset.newPasswd.details={0} Reset Password{1} \u2013 Our password requirements have changed. Please pick a strong password that matches the criteria below. +passwdReset.newPasswd=New Password +passwdReset.rePasswd=Retype Password +passwdReset.resetBtn=Reset Password + +#loginpage.xhtml +login.System=Login System +login.forgot.text=Forgot your password? +login.builtin=Dataverse Account +login.institution=Institutional Account +login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. +login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance. +login.builtin.credential.usernameOrEmail=Username/Email +login.builtin.credential.password=Password +login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? 
+login.signup.blurb=Sign up for a Dataverse account. +login.echo.credential.name=Name +login.echo.credential.email=Email +login.echo.credential.affiliation=Affiliation +# how do we exercise login.error? Via a password upgrade failure? See https://github.com/IQSS/dataverse/pull/2922 +login.error=Error validating the username, email address, or password. Please try again. If the problem persists, contact an administrator. +user.error.cannotChangePassword=Sorry, your password cannot be changed. Please contact your system administrator. +user.error.wrongPassword=Sorry, wrong password. +login.button=Log In with {0} +login.button.orcid=Create or Connect your ORCID +# authentication providers +auth.providers.title=Other options +auth.providers.tip=You can convert a Dataverse account to use one of the options above. More information about account creation. +auth.providers.title.builtin=Username/Email +auth.providers.title.shib=Your Institution +auth.providers.title.orcid=ORCID +auth.providers.title.google=Google +auth.providers.title.github=GitHub +auth.providers.blurb=Log in or sign up with your {0} account — more information about account creation. Having trouble? Please contact {3} for assistance. +auth.providers.persistentUserIdName.orcid=ORCID iD +auth.providers.persistentUserIdName.github=ID +auth.providers.persistentUserIdTooltip.orcid=ORCID provides a persistent digital identifier that distinguishes you from other researchers. +auth.providers.persistentUserIdTooltip.github=GitHub assigns a unique number to every user. +auth.providers.insufficientScope=Dataverse was not granted the permission to read user data from {0}. +auth.providers.exception.userinfo=Error getting the user info record from {0}. +auth.providers.token.failRetrieveToken=Dataverse could not retrieve an access token. +auth.providers.token.failParseToken=Dataverse could not parse the access token. +auth.providers.token.failGetUser=Dataverse could not get your user record. Please consult your administrator. +auth.providers.orcid.helpmessage1=ORCID is an open, non-profit, community-based effort to provide a registry of unique researcher identifiers and a transparent method of linking research activities and outputs to these identifiers. ORCID is unique in its ability to reach across disciplines, research sectors, and national boundaries and its cooperation with other identifier systems. Find out more at orcid.org/about. +auth.providers.orcid.helpmessage2=This repository uses your ORCID for authentication (so you don't need another username/password combination). Having your ORCID associated with your datasets also makes it easier for people to find the datasets you have published. + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth + +#confirmemail.xhtml +confirmEmail.pageTitle=Email Verification +confirmEmail.submitRequest=Verify Email +confirmEmail.submitRequest.success=A verification email has been sent to {0}. Note, the verify link will expire after {1}. +confirmEmail.details.success=Email address verified! +confirmEmail.details.failure=We were unable to verify your email address. Please navigate to your Account Information page and click the "Verify Email" button. 
+confirmEmail.details.goToAccountPageButton=Go to Account Information +confirmEmail.notVerified=Not Verified +confirmEmail.verified=Verified + +#shib.xhtml +shib.btn.convertAccount=Convert Account +shib.btn.createAccount=Create Account +shib.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +# Bundle file editors, please note that "shib.welcomeExistingUserMessage" is used in a unit test +shib.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. +# Bundle file editors, please note that "shib.welcomeExistingUserMessageDefaultInstitution" is used in a unit test +shib.welcomeExistingUserMessageDefaultInstitution=your institution +shib.dataverseUsername=Dataverse Username +shib.currentDataversePassword=Current Dataverse Password +shib.accountInformation=Account Information +shib.offerToCreateNewAccount=This information is provided by your institution and will be used to create your Dataverse account. +shib.passwordRejected=Validation Error - Your account can only be converted if you provide the correct password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. + +# oauth2/firstLogin.xhtml +oauth2.btn.convertAccount=Convert Existing Account +oauth2.btn.createAccount=Create New Account +oauth2.askToConvert=Would you like to convert your Dataverse account to always use your institutional log in? +oauth2.welcomeExistingUserMessage=Your institutional log in for {0} matches an email address already being used for a Dataverse account. By entering your current Dataverse password below, your existing Dataverse account can be converted to use your institutional log in. After converting, you will only need to use your institutional log in. +oauth2.welcomeExistingUserMessageDefaultInstitution=your institution +oauth2.dataverseUsername=Dataverse Username +oauth2.currentDataversePassword=Current Dataverse Password +oauth2.chooseUsername=Username: +oauth2.passwordRejected=Validation Error - Wrong username or password. +# oauth2.newAccount.title=Account Creation +oauth2.newAccount.welcomeWithName=Welcome to Dataverse, {0} +oauth2.newAccount.welcomeNoName=Welcome to Dataverse +# oauth2.newAccount.email=Email +# oauth2.newAccount.email.tip=Dataverse uses this email to notify you of issues regarding your data. +oauth2.newAccount.suggestedEmails=Suggested Email Addresses: +oauth2.newAccount.username=Username +oauth2.newAccount.username.tip=This username will be your unique identifier as a Dataverse user. +oauth2.newAccount.explanation=This information is provided by {0} and will be used to create your {1} account. To log in again, you will have to use the {0} log in option. +oauth2.newAccount.suggestConvertInsteadOfCreate=If you already have a {0} account, you will need to convert your account. +# oauth2.newAccount.tabs.convertAccount=Convert Existing Account +oauth2.newAccount.buttons.convertNewAccount=Convert Account +oauth2.newAccount.emailTaken=Email already taken. Consider merging the corresponding account instead. +oauth2.newAccount.emailOk=Email OK. +oauth2.newAccount.emailInvalid=Invalid email address. +# oauth2.newAccount.usernameTaken=Username already taken. 
+# oauth2.newAccount.usernameOk=Username OK. + +# oauth2/convert.xhtml +# oauth2.convertAccount.title=Account Conversion +oauth2.convertAccount.explanation=Please enter your {0} account username or email and password to convert your account to the {1} log in option. Learn more about converting your account. +oauth2.convertAccount.username=Existing username +oauth2.convertAccount.password=Password +oauth2.convertAccount.authenticationFailed=Your account can only be converted if you provide the correct username and password for your existing account. If your existing account has been deactivated by an administrator, you cannot convert your account. +oauth2.convertAccount.buttonTitle=Convert Account +oauth2.convertAccount.success=Your Dataverse account is now associated with your {0} account. +oauth2.convertAccount.failedDeactivated=Your existing account cannot be converted because it has been deactivated. + +# oauth2/callback.xhtml +oauth2.callback.page.title=OAuth Callback +oauth2.callback.message=Authentication Error - Dataverse could not authenticate your login with the provider that you selected. Please make sure you authorize your account to connect with Dataverse. For more details about the information being requested, see the User Guide. + +# deactivated user accounts +deactivated.error=Sorry, your account has been deactivated. + +# tab on dataverseuser.xhtml +apitoken.title=API Token +apitoken.message=Your API Token is valid for a year. Check out our {0}API Guide{1} for more information on using your API Token with the Dataverse APIs. +apitoken.notFound=API Token for {0} has not been created. +apitoken.expired.warning=This token is about to expire, please generate a new one. +apitoken.expired.error=This token is expired, please generate a new one. +apitoken.generateBtn=Create Token +apitoken.regenerateBtn=Recreate Token +apitoken.revokeBtn=Revoke Token +apitoken.expirationDate.label=Expiration Date + +#dashboard.xhtml +dashboard.title=Dashboard +dashboard.card.harvestingclients.header=Harvesting Clients +dashboard.card.harvestingclients.btn.manage=Manage Clients +dashboard.card.harvestingclients.clients={0, choice, 0#Clients|1#Client|2#Clients} +dashboard.card.harvestingclients.datasets={0, choice, 0#Datasets|1#Dataset|2#Datasets} +dashboard.card.harvestingserver.header=Harvesting Server +dashboard.card.harvestingserver.enabled=OAI server enabled +dashboard.card.harvestingserver.disabled=OAI server disabled +dashboard.card.harvestingserver.status=Status +dashboard.card.harvestingserver.sets={0, choice, 0#Sets|1#Set|2#Sets} +dashboard.card.harvestingserver.btn.manage=Manage Server +dashboard.card.metadataexport.header=Metadata Export +dashboard.card.metadataexport.message=Dataset metadata export is only available through the {0} API. Learn more in the {0} {1}API Guide{2}. + +#harvestclients.xhtml +harvestclients.title=Manage Harvesting Clients +harvestclients.toptip=Harvesting can be scheduled to run at a specific time or on demand. Harvesting can be initiated here or via the REST API. +harvestclients.noClients.label=No clients are configured. +harvestclients.noClients.why.header=What is Harvesting? +harvestclients.noClients.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting client, your Dataverse gathers metadata records from remote sources. These can be other Dataverse instances, or other archives that support OAI-PMH, the standard harvesting protocol. 
+harvestclients.noClients.why.reason2=Harvested metadata records are searchable by users. Clicking on a harvested dataset in the search results takes the user to the original repository. Harvested datasets cannot be edited in your Dataverse installation. +harvestclients.noClients.how.header=How To Use Harvesting +harvestclients.noClients.how.tip1=To harvest metadata, a Harvesting Client is created and configured for each remote repository. Note that when creating a client you will need to select an existing local dataverse to host harvested datasets. +harvestclients.noClients.how.tip2=Harvested records can be kept in sync with the original repository through scheduled incremental updates, for example, daily or weekly. Alternatively, harvests can be run on demand, from this page or via the REST API. +harvestclients.noClients.getStarted=To get started, click on the Add Client button above. To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestclients.btn.add=Add Client +harvestclients.tab.header.name=Nickname +harvestclients.tab.header.url=URL +harvestclients.tab.header.lastrun=Last Run +harvestclients.tab.header.lastresults=Last Results +harvestclients.tab.header.action=Actions +harvestclients.tab.header.action.btn.run=Run Harvesting +harvestclients.tab.header.action.btn.edit=Edit +harvestclients.tab.header.action.btn.delete=Delete +harvestclients.tab.header.action.btn.delete.dialog.header=Delete Harvesting Client +harvestclients.tab.header.action.btn.delete.dialog.warning=Are you sure you want to delete the harvesting client "{0}"? Deleting the client will delete all datasets harvested from this remote server. +harvestclients.tab.header.action.btn.delete.dialog.tip=Note, this action may take a while to process, depending on the number of harvested datasets. +harvestclients.tab.header.action.delete.infomessage=Harvesting client is being deleted. Note that this may take a while, depending on the amount of harvested content. +harvestclients.actions.runharvest.success=Successfully started an asynchronous harvest for client "{0}". Please reload the page to check on the harvest results. +harvestclients.newClientDialog.step1=Step 1 of 4 - Client Information +harvestclients.newClientDialog.title.new=Create Harvesting Client +harvestclients.newClientDialog.help=Configure a client to harvest content from a remote server. +harvestclients.newClientDialog.nickname=Nickname +harvestclients.newClientDialog.nickname.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestclients.newClientDialog.nickname.required=Client nickname cannot be empty! +harvestclients.newClientDialog.nickname.invalid=Client nickname can contain only letters, digits, underscores (_) and dashes (-); and must be at most 30 characters. +harvestclients.newClientDialog.nickname.alreadyused=This nickname is already used. +harvestclients.newClientDialog.type=Server Protocol +harvestclients.newClientDialog.type.helptext=Only the OAI server protocol is currently supported. +harvestclients.newClientDialog.type.OAI=OAI +harvestclients.newClientDialog.type.Nesstar=Nesstar +harvestclients.newClientDialog.url=Server URL +harvestclients.newClientDialog.url.tip=URL of a harvesting resource. +harvestclients.newClientDialog.url.watermark=Remote harvesting server, http://... +harvestclients.newClientDialog.url.helptext.notvalidated=URL of a harvesting resource.
Once you click 'Next', we will try to establish a connection to the server in order to verify that it is working, and to obtain extra information about its capabilities. +harvestclients.newClientDialog.url.required=A valid harvesting server address is required. +harvestclients.newClientDialog.url.invalid=Invalid URL. Failed to establish connection and receive a valid server response. +harvestclients.newClientDialog.url.noresponse=Failed to establish connection to the server. +harvestclients.newClientDialog.url.badresponse=Invalid response from the server. +harvestclients.newClientDialog.dataverse=Local Dataverse +harvestclients.newClientDialog.dataverse.tip=Dataverse that will host the datasets harvested from this remote resource. +harvestclients.newClientDialog.dataverse.menu.enterName=Enter Dataverse Alias +harvestclients.newClientDialog.dataverse.menu.header=Dataverse Name (Affiliate), Alias +harvestclients.newClientDialog.dataverse.menu.invalidMsg=No matches found +harvestclients.newClientDialog.dataverse.required=You must select an existing dataverse for this harvesting client. +harvestclients.newClientDialog.step2=Step 2 of 4 - Format +harvestclients.newClientDialog.oaiSets=OAI Set +harvestclients.newClientDialog.oaiSets.tip=Harvesting sets offered by this OAI server. +harvestclients.newClientDialog.oaiSets.noset=None +harvestclients.newClientDialog.oaiSets.helptext=Selecting "none" will harvest the default set, as defined by the server. Often this will be the entire body of content across all sub-sets. +harvestclients.newClientDialog.oaiSets.helptext.noset=This OAI server does not support named sets. The entire body of content offered by the server will be harvested. +harvestclients.newClientDialog.oaiSets.listTruncated=Please note that the remote server was taking too long to return the full list of available OAI sets, so the list was truncated. Please select a set from the current list (or select the "no set" option), and try again later, if you need to change it. +harvestclients.newClientDialog.oaiMetadataFormat=Metadata Format +harvestclients.newClientDialog.oaiMetadataFormat.tip=Metadata formats offered by the remote server. +harvestclients.newClientDialog.oaiMetadataFormat.required=Please select the metadata format to harvest from this archive. +harvestclients.newClientDialog.step3=Step 3 of 4 - Schedule +harvestclients.newClientDialog.schedule=Schedule +harvestclients.newClientDialog.schedule.tip=Schedule harvesting to run automatically daily or weekly. +harvestclients.newClientDialog.schedule.time.none.helptext=Leave harvesting unscheduled to run on demand only. +harvestclients.newClientDialog.schedule.none=None +harvestclients.newClientDialog.schedule.daily=Daily +harvestclients.newClientDialog.schedule.weekly=Weekly +harvestclients.newClientDialog.schedule.time=Time +harvestclients.newClientDialog.schedule.day=Day +harvestclients.newClientDialog.schedule.time.am=AM +harvestclients.newClientDialog.schedule.time.pm=PM +harvestclients.newClientDialog.schedule.time.helptext=Scheduled times are in your local time. +harvestclients.newClientDialog.btn.create=Create Client +harvestclients.newClientDialog.success=Successfully created harvesting client "{0}". +harvestclients.newClientDialog.step4=Step 4 of 4 - Display +harvestclients.newClientDialog.harvestingStyle=Archive Type +harvestclients.newClientDialog.harvestingStyle.tip=Type of remote archive. 
+harvestclients.newClientDialog.harvestingStyle.helptext=Select the archive type that best describes this remote server in order to properly apply formatting rules and styles to the harvested metadata as they are shown in the search results. Note that improperly selecting the type of the remote archive can result in incomplete entries in the search results, and a failure to redirect the user to the archival source of the data. +harvestclients.newClientDialog.harvestingStyle.required=Please select one of the values from the menu. +harvestclients.viewEditDialog.title=Edit Harvesting Client +harvestclients.viewEditDialog.archiveUrl=Archive URL +harvestclients.viewEditDialog.archiveUrl.tip=The URL of the archive that serves the data harvested by this client, which is used in search results for links to the original sources of the harvested content. +harvestclients.viewEditDialog.archiveUrl.helptext=Edit if this URL differs from the Server URL. +harvestclients.viewEditDialog.archiveDescription=Archive Description +harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival source of the harvested content, displayed in search results. +harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data. +harvestclients.viewEditDialog.btn.save=Save Changes +harvestclients.newClientDialog.title.edit=Edit Group {0} + +#harvestset.xhtml +harvestserver.title=Manage Harvesting Server +harvestserver.toptip=Define sets of local datasets that will be available for harvesting by remote clients. +harvestserver.service.label=OAI Server +harvestserver.service.enabled=Enabled +harvestserver.service.disabled=Disabled +harvestserver.service.disabled.msg=Harvesting Server is currently disabled. +harvestserver.service.empty=No sets are configured. +harvestserver.service.enable.success=OAI Service has been successfully enabled. +harvestserver.noSets.why.header=What is a Harvesting Server? +harvestserver.noSets.why.reason1=Harvesting is a process of exchanging metadata with other repositories. As a harvesting server, your Dataverse can make some of the local dataset metadata available to remote harvesting clients. These can be other Dataverse instances, or any other clients that support OAI-PMH harvesting protocol. +harvestserver.noSets.why.reason2=Only the published, unrestricted datasets in your Dataverse can be harvested. Remote clients normally keep their records in sync through scheduled incremental updates, daily or weekly, thus minimizing the load on your server. Note that it is only the metadata that are harvested. Remote harvesters will generally not attempt to download the data files themselves. +harvestserver.noSets.how.header=How to run a Harvesting Server? +harvestserver.noSets.how.tip1=Harvesting server can be enabled or disabled on this page. +harvestserver.noSets.how.tip2=Once the service is enabled, you can define collections of local datasets that will be available to remote harvesters as OAI Sets. Sets are defined by search queries (for example, authorName:king; or parentId:1234 - to select all the datasets that belong to the dataverse specified; or dsPersistentId:"doi:1234/" to select all the datasets with the persistent identifier authority specified). Consult the Search API section of the Dataverse User Guide for more information on the search queries. +harvestserver.noSets.getStarted=To get started, enable the OAI server and click on the Add Set button. 
To learn more about Harvesting, visit the Harvesting section of the User Guide. +harvestserver.btn.add=Add Set +harvestserver.tab.header.spec=OAI setSpec +harvestserver.tab.col.spec.default=DEFAULT +harvestserver.tab.header.description=Description +harvestserver.tab.header.definition=Definition Query +harvestserver.tab.col.definition.default=All Published Local Datasets +harvestserver.tab.header.stats=Datasets +harvestserver.tab.col.stats.empty=No records (empty set) +harvestserver.tab.col.stats.results={0} {0, choice, 0#datasets|1#dataset|2#datasets} ({1} {1, choice, 0#records|1#record|2#records} exported, {2} marked as deleted) +harvestserver.tab.header.action=Actions +harvestserver.tab.header.action.btn.export=Run Export +harvestserver.actions.runreexport.success=Successfully started an asynchronous re-export job for OAI set "{0}" (please reload the page to check on the export progress). +harvestserver.tab.header.action.btn.edit=Edit +harvestserver.tab.header.action.btn.delete=Delete +harvestserver.tab.header.action.btn.delete.dialog.header=Delete Harvesting Set +harvestserver.tab.header.action.btn.delete.dialog.tip=Are you sure you want to delete the OAI set "{0}"? You cannot undo a delete! +harvestserver.tab.header.action.delete.infomessage=Selected harvesting set is being deleted. (this may take a few moments) +harvestserver.newSetDialog.title.new=Create Harvesting Set +harvestserver.newSetDialog.help=Define a set of local datasets available for harvesting to remote clients. +harvestserver.newSetDialog.setspec=Name/OAI setSpec +harvestserver.newSetDialog.setspec.tip=A unique name (OAI setSpec) identifying this set. +harvestserver.newSetDialog.setspec.helptext=Consists of letters, digits, underscores (_) and dashes (-). +harvestserver.editSetDialog.setspec.helptext=The name can not be changed once the set has been created. +harvestserver.editSetDialog.setspec.helptext.default=this is the default, unnamed set +harvestserver.newSetDialog.setspec.required=Name (OAI setSpec) cannot be empty! +harvestserver.newSetDialog.setspec.invalid=Name (OAI setSpec) can contain only letters, digits, underscores (_) and dashes (-). +harvestserver.newSetDialog.setspec.alreadyused=This set name (OAI setSpec) is already used. +harvestserver.newSetDialog.setspec.sizelimit=This set name (OAI setSpec) may be no longer than 30 characters. +harvestserver.newSetDialog.setspec.superUser.required=Only superusers may create OAI sets. +harvestserver.newSetDialog.setdescription=Description +harvestserver.newSetDialog.setdescription.tip=Provide a brief description for this OAI set. +harvestserver.newSetDialog.setdescription.required=Set description cannot be empty! +harvestserver.newSetDialog.setdescription.default=The default, "no name" set. The OAI server will serve the records from this set when no "setspec" argument is specified by the client. +harvestserver.newSetDialog.setquery=Definition Query +harvestserver.newSetDialog.setquery.tip=Search query that defines the content of the dataset. +harvestserver.newSetDialog.setquery.helptext=Example query: authorName:king +harvestserver.newSetDialog.setquery.required=Search query cannot be left empty! +harvestserver.newSetDialog.setquery.results=Search query returned {0} datasets! +harvestserver.newSetDialog.setquery.empty=WARNING: Search query returned no results! +harvestserver.newSetDialog.btn.create=Create Set +harvestserver.newSetDialog.success=Successfully created harvesting set "{0}". 
+harvestserver.viewEditDialog.title=Edit Harvesting Set +harvestserver.viewEditDialog.btn.save=Save Changes + +#dashboard-users.xhtml +dashboard.card.users=Users +dashboard.card.users.header=Dashboard - User List +dashboard.card.users.super=Superusers +dashboard.card.users.manage=Manage Users +dashboard.card.users.message=List and manage users. +dashboard.list_users.searchTerm.watermark=Search these users... +dashboard.list_users.tbl_header.userId=ID +dashboard.list_users.tbl_header.userIdAZ=ID (A-Z) +dashboard.list_users.tbl_header.userIdZA=ID (Z-A) +dashboard.list_users.tbl_header.userIdentifier=Username +dashboard.list_users.tbl_header.userIdentifierAZ=Username (A-Z) +dashboard.list_users.tbl_header.userIdentifierZA=Username (Z-A) +dashboard.list_users.tbl_header.name=Name +dashboard.list_users.tbl_header.lastName=Last Name +dashboard.list_users.tbl_header.lastNameAZ=Last Name (A-Z) +dashboard.list_users.tbl_header.lastNameZA=Last Name (Z-A) +dashboard.list_users.tbl_header.firstName=First Name +dashboard.list_users.tbl_header.email=Email +dashboard.list_users.tbl_header.emailAZ=Email (A-Z) +dashboard.list_users.tbl_header.emailZA=Email (Z-A) +dashboard.list_users.tbl_header.affiliation=Affiliation +dashboard.list_users.tbl_header.affiliationAZ=Affiliation (A-Z) +dashboard.list_users.tbl_header.affiliationZA=Affiliation (Z-A) +dashboard.list_users.tbl_header.roles=Roles +dashboard.list_users.tbl_header.position=Position +dashboard.list_users.tbl_header.isSuperuser=Superuser +dashboard.list_users.tbl_header.superuserAZ=Superuser (A-Z) +dashboard.list_users.tbl_header.superuserZA=Superuser (Z-A) +dashboard.list_users.tbl_header.authProviderFactoryAlias=Authentication +dashboard.list_users.tbl_header.authProviderFactoryAliasAZ=Authentication (A-Z) +dashboard.list_users.tbl_header.authProviderFactoryAliasZA=Authentication (Z-A) +dashboard.list_users.tbl_header.createdTime=Created Time +dashboard.list_users.tbl_header.lastLoginTime=Last Login Time +dashboard.list_users.tbl_header.lastApiUseTime=Last API Use Time +dashboard.list_users.tbl_header.deactivated=deactivated +dashboard.list_users.tbl_header.roles.removeAll=Remove All +dashboard.list_users.tbl_header.roles.removeAll.header=Remove All Roles +dashboard.list_users.tbl_header.roles.removeAll.confirmationText=Are you sure you want to remove all roles for user {0}? +dashboard.list_users.removeAll.message.success=All roles have been removed for user {0}. +dashboard.list_users.removeAll.message.failure=Failed to remove roles for user {0}. +dashboard.list_users.toggleSuperuser=Edit Superuser Status +dashboard.list_users.toggleSuperuser.confirmationText.add=Are you sure you want to enable superuser status for user {0}? +dashboard.list_users.toggleSuperuser.confirmationText.remove=Are you sure you want to disable superuser status for user {0}? +dashboard.list_users.api.auth.invalid_apikey=The API key is invalid. +dashboard.list_users.api.auth.not_superuser=Forbidden. You must be a superuser. + +#dashboard-datamove.xhtml +dashboard.card.datamove=Data +dashboard.card.datamove.header=Dashboard - Move Data +dashboard.card.datamove.manage=Move Data +dashboard.card.datamove.message=Manage and curate your installation by moving datasets from one host dataverse to another. See also Managing Datasets and Dataverses in the Admin Guide. 
+dashboard.card.datamove.selectdataset.header=Dataset to Move +dashboard.card.datamove.newdataverse.header=New Host Dataverse +dashboard.card.datamove.dataset.label=Dataset +dashboard.card.datamove.dataverse.label=Dataverse +dashboard.card.datamove.confirm.dialog=Are you sure you want to move this dataset? +dashboard.card.datamove.confirm.yes=Yes, Move Data +dashboard.card.datamove.message.success=The dataset "{0}" ({1}) has been successfully moved to {2}. +dashboard.card.datamove.message.failure.summary=Failed to move dataset +dashboard.card.datamove.message.failure.details=The dataset "{0}" ({1}) could not be moved to {2}. {3}{4} +dashboard.card.datamove.dataverse.placeholder=Enter Dataverse Identifier... +dashboard.card.datamove.dataverse.menu.header=Dataverse Name (Affiliate), Identifier +dashboard.card.datamove.dataverse.menu.invalidMsg=No matches found +dashboard.card.datamove.dataset.placeholder=Enter Dataset Persistent ID, doi:... +dashboard.card.datamove.dataset.menu.header=Dataset Persistent ID, Title, Host Dataverse Identifier +dashboard.card.datamove.dataset.menu.invalidMsg=No matches found +dashboard.card.datamove.dataset.command.error.targetDataverseUnpublishedDatasetPublished=A published dataset may not be moved to an unpublished dataverse. You can retry the move after publishing {0}. +dashboard.card.datamove.dataset.command.error.targetDataverseSameAsOriginalDataverse=This dataset is already in this dataverse. +dashboard.card.datamove.dataset.command.error.unforced.datasetGuestbookNotInTargetDataverse=The guestbook would be removed from this dataset if you moved it because the guestbook is not in the new host dataverse. +dashboard.card.datamove.dataset.command.error.unforced.linkedToTargetDataverseOrOneOfItsParents=This dataset is linked to the new host dataverse or one of its parents. This move would remove the link to this dataset. +dashboard.card.datamove.dataset.command.error.unforced.suggestForce=Forcing this move is currently only available via API. Please see "Move a Dataset" under Managing Datasets and Dataverses in the Admin Guide for details. +dashboard.card.datamove.dataset.command.error.indexingProblem=Dataset could not be moved. Indexing failed. + +#MailServiceBean.java +notification.email.create.dataverse.subject={0}: Your dataverse has been created +notification.email.create.dataset.subject={0}: Your dataset has been created +notification.email.request.file.access.subject={0}: Access has been requested for a restricted file +notification.email.grant.file.access.subject={0}: You have been granted access to a restricted file +notification.email.rejected.file.access.subject={0}: Your request for access to a restricted file has been rejected +notification.email.submit.dataset.subject={0}: Your dataset has been submitted for review +notification.email.publish.dataset.subject={0}: Your dataset has been published +notification.email.publishFailure.dataset.subject={0}: Failed to publish your dataset +notification.email.returned.dataset.subject={0}: Your dataset has been returned +notification.email.workflow.success.subject={0}: Your dataset has been processed +notification.email.workflow.success=A workflow running on {0} (view at {1}) succeeded: {2} +notification.email.workflow.failure.subject={0}: Failed to process your dataset +notification.email.workflow.failure=A workflow running on {0} (view at {1}) failed: {2} +notification.email.workflow.nullMessage=No additional message sent from the workflow.
+notification.email.create.account.subject={0}: Your account has been created +notification.email.assign.role.subject={0}: You have been assigned a role +notification.email.revoke.role.subject={0}: Your role has been revoked +notification.email.verifyEmail.subject={0}: Verify your email address +notification.email.ingestCompleted.subject={0}: Your ingest has successfully finished! +notification.email.ingestCompletedWithErrors.subject={0}: Your ingest has finished with errors! +notification.email.greeting=Hello, \n +notification.email.greeting.html=Hello,
+# Bundle file editors, please note that "notification.email.welcome" is used in a unit test +notification.email.welcome=Welcome to {0}! Get started by adding or finding data. Have questions? Check out the User Guide at {1}/{2}/user or contact {3} at {4} for assistance. +notification.email.welcomeConfirmEmailAddOn=\n\nPlease verify your email address at {0} . Note, the verify link will expire after {1}. Send another verification email by visiting your account page. +notification.email.requestFileAccess=File access requested for dataset: {0} by {1} ({2}). Manage permissions at {3}. +notification.email.grantFileAccess=Access granted for files in dataset: {0} (view at {1}). +notification.email.rejectFileAccess=Your request for access was rejected for the requested files in the dataset: {0} (view at {1}). If you have any questions about why your request was rejected, you may reach the dataset owner using the "Contact" link on the upper right corner of the dataset page. +# Bundle file editors, please note that "notification.email.createDataverse" is used in a unit test +notification.email.createDataverse=Your new dataverse named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with your dataverse, check out the Dataverse Management - User Guide at {4}/{5}/user/dataverse-management.html . +# Bundle file editors, please note that "notification.email.createDataset" is used in a unit test +notification.email.createDataset=Your new dataset named {0} (view at {1} ) was created in {2} (view at {3} ). To learn more about what you can do with a dataset, check out the Dataset Management - User Guide at {4}/{5}/user/dataset-management.html . +notification.email.wasSubmittedForReview={0} (view at {1}) was submitted for review to be published in {2} (view at {3}). Don''t forget to publish it or send it back to the contributor, {4} ({5})\! +notification.email.wasReturnedByReviewer={0} (view at {1}) was returned by the curator of {2} (view at {3}). +notification.email.wasPublished={0} (view at {1}) was published in {2} (view at {3}). +notification.email.publishFailedPidReg={0} (view at {1}) in {2} (view at {3}) could not be published due to a failure to register, or update the Global Identifier for the dataset or one of the files in it. Contact support if this continues to happen. +notification.email.closing=\n\nYou may contact us for support at https://site.uit.no/dataverseno/support/ .\n\nThank you,\n{1} +notification.email.closing.html=<br/><br/>You may contact us for support at {0}.<br/><br/>Thank you,<br/>
{1} +notification.email.assignRole=You are now {0} for the {1} "{2}" (view at {3}). +notification.email.revokeRole=One of your roles for the {0} "{1}" has been revoked (view at {2}). +notification.email.changeEmail=Hello, {0}.{1}\n\nPlease contact us if you did not intend this change or if you need assistance. +notification.email.passwordReset=Hi {0},\n\nSomeone, hopefully you, requested a password reset for {1}.\n\nPlease click the link below to reset your Dataverse account password:\n\n {2} \n\n The link above will only work for the next {3} minutes.\n\n Please contact us if you did not request this password reset or need further help. +notification.email.passwordReset.subject=Dataverse Password Reset Requested +hours=hours +hour=hour +minutes=minutes +minute=minute +notification.email.checksumfail.subject={0}: Your upload failed checksum validation +notification.email.import.filesystem.subject=Dataset {0} has been successfully uploaded and verified +notification.email.import.checksum.subject={0}: Your file checksum job has completed +contact.delegation={0} on behalf of {1} +contact.delegation.default_personal=Dataverse Installation Admin +notification.email.info.unavailable=Unavailable +notification.email.apiTokenGenerated=Hello {0} {1},\n\nAPI Token has been generated. Please keep it secure as you would do with a password. +notification.email.apiTokenGenerated.subject=API Token was generated + +# dataverse.xhtml +dataverse.name=Dataverse Name +dataverse.name.title=The project, department, university, professor, or journal this dataverse will contain data for. +dataverse.enterName=Enter name... +dataverse.host.title=The dataverse which contains this data. +dataverse.host.tip=Changing the host dataverse will clear any fields you may have entered data into. +dataverse.host.autocomplete.nomatches=No matches +dataverse.identifier.title=Short name used for the URL of this dataverse. +dataverse.affiliation.title=The organization with which this dataverse is affiliated. +dataverse.storage.title=A storage service to be used for datasets in this dataverse. +dataverse.category=Category +dataverse.category.title=The type that most closely reflects this dataverse. +dataverse.type.selectTab.top=Select one... +dataverse.type.selectTab.researchers=Researcher +dataverse.type.selectTab.researchProjects=Research Project +dataverse.type.selectTab.journals=Journal +dataverse.type.selectTab.organizationsAndInsitutions=Organization or Institution +dataverse.type.selectTab.teachingCourses=Teaching Course +dataverse.type.selectTab.uncategorized=Uncategorized +dataverse.type.selectTab.researchGroup=Research Group +dataverse.type.selectTab.laboratory=Laboratory +dataverse.type.selectTab.department=Department +dataverse.description.title=A summary describing the purpose, nature, or scope of this dataverse. +dataverse.email=Email +dataverse.email.title=The e-mail address(es) of the contact(s) for the dataverse. +dataverse.share.dataverseShare=Share Dataverse +dataverse.share.dataverseShare.tip=Share this dataverse on your favorite social media networks. +dataverse.share.dataverseShare.shareText=View this dataverse. +dataverse.subject.title=Subject(s) covered in this dataverse. +dataverse.metadataElements=Metadata Fields +dataverse.metadataElements.tip=Choose the metadata fields to use in dataset templates and when adding a dataset to this dataverse. 
+dataverse.metadataElements.from.tip=Use metadata fields from {0} +dataverse.resetModifications=Reset Modifications +dataverse.resetModifications.text=Are you sure you want to reset the selected metadata fields? If you do this, any customizations (hidden, required, optional) you have done will no longer appear. +dataverse.field.required=(Required) +dataverse.field.example1= (Examples: +dataverse.field.example2=) +dataverse.field.set.tip=[+] View fields + set as hidden, required, or optional +dataverse.field.set.view=[+] View fields +dataverse.field.requiredByDataverse=Required by Dataverse +dataverse.facetPickList.text=Browse/Search Facets +dataverse.facetPickList.tip=Choose the metadata fields to use as facets for browsing datasets and dataverses in this dataverse. +dataverse.facetPickList.facetsFromHost.text=Use browse/search facets from {0} +dataverse.facetPickList.metadataBlockList.all=All Metadata Fields +dataverse.edit=Edit +dataverse.option.generalInfo=General Information +dataverse.option.themeAndWidgets=Theme + Widgets +dataverse.option.featuredDataverse=Featured Dataverses +dataverse.option.permissions=Permissions +dataverse.option.dataverseGroups=Groups +dataverse.option.datasetTemplates=Dataset Templates +dataverse.option.datasetGuestbooks=Dataset Guestbooks +dataverse.option.deleteDataverse=Delete Dataverse +dataverse.publish.btn=Publish +dataverse.publish.header=Publish Dataverse +dataverse.nopublished=No Published Dataverses +dataverse.nopublished.tip=In order to use this feature you must have at least one published dataverse. +dataverse.contact=Email Dataverse Contact +dataverse.link=Link Dataverse +dataverse.link.btn.tip=Link to Your Dataverse +dataverse.link.yourDataverses=Your Dataverse +dataverse.link.yourDataverses.inputPlaceholder=Enter Dataverse Name +dataverse.link.save=Save Linked Dataverse +dataverse.link.dataverse.choose=Choose which of your dataverses you would like to link this dataverse to. +dataverse.link.dataset.choose=Enter the name of the dataverse you would like to link this dataset to. If you need to remove this link in the future, please contact {0}. +dataverse.link.dataset.none=No linkable dataverses available. +dataverse.link.no.choice=You have one dataverse you can add linked dataverses and datasets in. +dataverse.link.no.linkable=To be able to link a dataverse or dataset, you need to have your own dataverse. Create a dataverse to get started. +dataverse.link.no.linkable.remaining=You have already linked all of your eligible dataverses. +dataverse.savedsearch.link=Link Search +dataverse.savedsearch.searchquery=Search +dataverse.savedsearch.filterQueries=Facets +dataverse.savedsearch.save=Save Linked Search +dataverse.savedsearch.dataverse.choose=Choose which of your dataverses you would like to link this search to. +dataverse.savedsearch.no.choice=You have one dataverse to which you may add a saved search. +# Bundle file editors, please note that "dataverse.savedsearch.save.success" is used in a unit test +dataverse.saved.search.success=The saved search has been successfully linked to {0}. +dataverse.saved.search.failure=The saved search was not able to be linked. +dataverse.linked.success= {0} has been successfully linked to {1}. +dataverse.linked.success.wait= {0} has been successfully linked to {1}. Please wait for its contents to appear. +dataverse.linked.internalerror={0} has been successfully linked to {1} but contents will not appear until an internal error has been fixed. 
+dataverse.linked.error.alreadyLinked={0} has already been linked to {1}. +dataverse.page.pre=Previous +dataverse.page.next=Next +dataverse.byCategory=Dataverses by Category +dataverse.displayFeatured=Display the dataverses selected below on the landing page of this dataverse. +dataverse.selectToFeature=Select dataverses to feature on the landing page of this dataverse. +dataverse.publish.tip=Are you sure you want to publish your dataverse? Once you do so it must remain published. +dataverse.publish.failed.tip=This dataverse cannot be published because the dataverse it is in has not been published. +dataverse.publish.failed=Cannot publish dataverse. +dataverse.publish.success=Your dataverse is now public. +dataverse.publish.failure=This dataverse was not able to be published. +dataverse.delete.tip=Are you sure you want to delete your dataverse? You cannot undelete this dataverse. +dataverse.delete=Delete Dataverse +dataverse.delete.success=Your dataverse has been deleted. +dataverse.delete.failure=This dataverse was not able to be deleted. +# Bundle file editors, please note that "dataverse.create.success" is used in a unit test because it's so fancy with two parameters +dataverse.create.success=You have successfully created your dataverse! To learn more about what you can do with your dataverse, check out the User Guide. +dataverse.create.failure=This dataverse was not able to be created. +dataverse.create.authenticatedUsersOnly=Only authenticated users can create dataverses. +dataverse.update.success=You have successfully updated your dataverse! +dataverse.update.failure=This dataverse was not able to be updated. +dataverse.selected=Selected +dataverse.listing.error=Fatal error trying to list the contents of the dataverse. Please report this error to the Dataverse administrator. +dataverse.datasize=Total size of the files stored in this dataverse: {0} bytes +dataverse.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataverse. Please report this error to the Dataverse administrator. +dataverse.storage.inherited=(inherited from enclosing Dataverse) +dataverse.storage.default=(Default) +# rolesAndPermissionsFragment.xhtml + +# advanced.xhtml +advanced.search.header.dataverses=Dataverses +advanced.search.dataverses.name.tip=The project, department, university, professor, or journal this Dataverse will contain data for. +advanced.search.dataverses.affiliation.tip=The organization with which this Dataverse is affiliated. +advanced.search.dataverses.description.tip=A summary describing the purpose, nature, or scope of this Dataverse. +advanced.search.dataverses.subject.tip=Domain-specific Subject Categories that are topically relevant to this Dataverse. +advanced.search.header.datasets=Datasets +advanced.search.header.files=Files +advanced.search.files.name.tip=The name given to identify the file. +advanced.search.files.description.tip=A summary describing the file and its variables. +advanced.search.files.persistentId.tip=The persistent identifier for the file. +advanced.search.files.persistentId=Data File Persistent ID +advanced.search.files.persistentId.tip=The unique persistent identifier for a data file, which can be a Handle or DOI in Dataverse. +advanced.search.files.fileType=File Type +advanced.search.files.fileType.tip=The file type, e.g. Comma Separated Values, Plain Text, R, etc. +advanced.search.files.variableName=Variable Name +advanced.search.files.variableName.tip=The name of the variable's column in the data frame. 
+advanced.search.files.variableLabel=Variable Label +advanced.search.files.variableLabel.tip=A short description of the variable. +advanced.search.datasets.persistentId.tip=The persistent identifier for the dataset. +advanced.search.datasets.persistentId=Dataset Persistent ID +advanced.search.datasets.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. +advanced.search.files.fileTags=File Tags +advanced.search.files.fileTags.tip=Terms such as "Documentation", "Data", or "Code" that have been applied to files. + +# search +search.datasets.literalquestion=Text of the actual, literal question asked. +search.datasets.interviewinstructions=Specific instructions to the individual conducting an interview. +search.datasets.postquestion=Text describing what occurs after the literal question has been asked. +search.datasets.variableuniverse=The group of persons or other elements that are the object of research and to which any analytic results refer. +search.datasets.variableNotes=For clarifying information/annotation regarding the variable. + +# search-include-fragment.xhtml +dataverse.search.advancedSearch=Advanced Search +dataverse.search.input.watermark=Search this dataverse... +account.search.input.watermark=Search this data... +dataverse.search.btn.find=Find +dataverse.results.btn.addData=Add Data +dataverse.results.btn.addData.newDataverse=New Dataverse +dataverse.results.btn.addData.newDataset=New Dataset +dataverse.results.dialog.addDataGuest.header=Add Data +dataverse.results.dialog.addDataGuest.msg=Log in to create a dataverse or add a dataset. +dataverse.results.dialog.addDataGuest.msg.signup=You need to Log In to add a dataset. +dataverse.results.dialog.addDataGuest.signup.title=Sign Up for a Dataverse Account +dataverse.results.dialog.addDataGuest.login.title=Log into your Dataverse Account +dataverse.results.types.dataverses=Dataverses +dataverse.results.types.datasets=Datasets +dataverse.results.types.files=Files +dataverse.results.btn.filterResults=Filter Results +# Bundle file editors, please note that "dataverse.results.empty.zero" is used in a unit test +dataverse.results.empty.zero=There are no dataverses, datasets, or files that match your search. Please try a new search by using other or broader terms. You can also check out the search guide for tips. +# Bundle file editors, please note that "dataverse.results.empty.hidden" is used in a unit test +dataverse.results.empty.hidden=There are no search results based on how you have narrowed your search. You can check out the search guide for tips. +dataverse.results.empty.browse.guest.zero=This dataverse currently has no dataverses, datasets, or files. Please log in to see if you are able to add to it. +dataverse.results.empty.browse.guest.hidden=There are no dataverses within this dataverse. Please log in to see if you are able to add to it. +dataverse.results.empty.browse.loggedin.noperms.zero=This dataverse currently has no dataverses, datasets, or files. You can use the Email Dataverse Contact button above to ask about this dataverse or request access for this dataverse. +dataverse.results.empty.browse.loggedin.noperms.hidden=There are no dataverses within this dataverse. +dataverse.results.empty.browse.loggedin.perms.zero=This dataverse currently has no dataverses, datasets, or files. You can add to it by using the Add Data button on this page. +account.results.empty.browse.loggedin.perms.zero=You have no dataverses, datasets, or files associated with your account.
You can add a dataverse or dataset by clicking the Add Data button above. Read more about adding data in the User Guide. +dataverse.results.empty.browse.loggedin.perms.hidden=There are no dataverses within this dataverse. You can add to it by using the Add Data button on this page. +dataverse.results.empty.link.technicalDetails=More technical details +dataverse.search.facet.error=There was an error with your search parameters. Please clear your search and try again. +dataverse.results.count.toofresults={0} to {1} of {2} {2, choice, 0#Results|1#Result|2#Results} +dataverse.results.paginator.current=(Current) +dataverse.results.btn.sort=Sort +dataverse.results.btn.sort.option.nameAZ=Name (A-Z) +dataverse.results.btn.sort.option.nameZA=Name (Z-A) +dataverse.results.btn.sort.option.newest=Newest +dataverse.results.btn.sort.option.oldest=Oldest +dataverse.results.btn.sort.option.relevance=Relevance +dataverse.results.cards.foundInMetadata=Found in Metadata Fields: +dataverse.results.cards.files.tabularData=Tabular Data +dataverse.results.solrIsDown=Please note: Due to an internal error, browsing and searching is not available. +dataverse.theme.title=Theme +dataverse.theme.inheritCustomization.title=For this dataverse, use the same theme as the parent dataverse. +dataverse.theme.inheritCustomization.label=Inherit Theme +dataverse.theme.inheritCustomization.checkbox=Inherit theme from {0} +dataverse.theme.logo=Logo +dataverse.theme.logo.tip=Supported image types are JPG, TIF, or PNG and should be no larger than 500 KB. The maximum display size for an image file in a dataverse's theme is 940 pixels wide by 120 pixels high. +dataverse.theme.logo.format=Logo Format +dataverse.theme.logo.format.selectTab.square=Square +dataverse.theme.logo.format.selectTab.rectangle=Rectangle +dataverse.theme.logo.alignment=Logo Alignment +dataverse.theme.logo.alignment.selectTab.left=Left +dataverse.theme.logo.alignment.selectTab.center=Center +dataverse.theme.logo.alignment.selectTab.right=Right +dataverse.theme.logo.backColor=Logo Background Color +dataverse.theme.logo.image.upload=Upload Image +dataverse.theme.tagline=Tagline +dataverse.theme.website=Website +dataverse.theme.linkColor=Link Color +dataverse.theme.txtColor=Text Color +dataverse.theme.backColor=Background Color +dataverse.theme.success=You have successfully updated the theme for this dataverse! +dataverse.theme.failure=The dataverse theme has not been updated. +dataverse.theme.logo.image=Logo Image +dataverse.theme.logo.imageFooter=Footer Image +dataverse.theme.logo.image.title=The logo or image file you wish to display in the header of this dataverse. +dataverse.theme.logo.image.footer=The logo or image file you wish to display in the footer of this dataverse. +dataverse.theme.logo.image.uploadNewFile=Upload New File +dataverse.theme.logo.image.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataverse.theme.logo.image.uploadImgFile=Upload Image File +dataverse.theme.logo.format.title=The shape for the logo or image file you upload for this dataverse. +dataverse.theme.logo.alignment.title=Where the logo or image should display in the header or footer. +dataverse.theme.logo.backColor.title=Select a color to display in the header or footer of this dataverse. +dataverse.theme.headerColor=Header Colors +dataverse.theme.headerColor.tip=Colors you select to style the header of this dataverse. +dataverse.theme.backColor.title=Color for the header area that contains the image, tagline, URL, and text. 
+dataverse.theme.linkColor.title=Color for the link to display as. +dataverse.theme.txtColor.title=Color for the tagline text and the name of this dataverse. +dataverse.theme.tagline.title=A phrase or sentence that describes this dataverse. +dataverse.theme.tagline.tip=Provide a tagline that is 140 characters or less. +dataverse.theme.website.title=URL for your personal website, institution, or any website that relates to this dataverse. +dataverse.theme.website.tip=The website will be linked behind the tagline. To have a website listed, you must also provide a tagline. +dataverse.theme.website.watermark=Your personal site, http://... +dataverse.theme.website.invalidMsg=Invalid URL. +dataverse.theme.disabled=The theme for the root dataverse has been administratively disabled with the :DisableRootDataverseTheme database setting. +dataverse.widgets.title=Widgets +dataverse.widgets.notPublished.why.header=Why Use Widgets? +dataverse.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataverse.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataverse.widgets.notPublished.how.header=How To Use Widgets +dataverse.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataverse.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataverse.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataverse.widgets.notPublished.getStarted=To get started, publish your dataverse. To learn more about Widgets, visit the Theme + Widgets section of the User Guide. +dataverse.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Theme + Widgets section of the User Guide. +dataverse.widgets.searchBox.txt=Dataverse Search Box +dataverse.widgets.searchBox.tip=Add a way for visitors on your website to be able to search Dataverse. +dataverse.widgets.dataverseListing.txt=Dataverse Listing +dataverse.widgets.dataverseListing.tip=Add a way for visitors on your website to be able to view your dataverses and datasets, sort, or browse through them. +dataverse.widgets.advanced.popup.header=Widget Advanced Options +dataverse.widgets.advanced.prompt=Forward dataset citation persistent URL's to your personal website. The page you submit as your Personal Website URL must contain the code snippet for the Dataverse Listing widget. +dataverse.widgets.advanced.url.label=Personal Website URL +dataverse.widgets.advanced.url.watermark=http://www.example.com/page-name +dataverse.widgets.advanced.invalid.message=Please enter a valid URL +dataverse.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataverse.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. 
+ +# permissions-manage.xhtml +dataverse.permissions.title=Permissions +dataverse.permissions.dataset.title=Dataset Permissions +dataverse.permissions.access.accessBtn=Edit Access +dataverse.permissions.usersOrGroups=Users/Groups +dataverse.permissions.requests=Requests +dataverse.permissions.usersOrGroups.assignBtn=Assign Roles to Users/Groups +dataverse.permissions.usersOrGroups.createGroupBtn=Create Group +dataverse.permissions.usersOrGroups.description=All the users and groups that have access to your dataverse. +dataverse.permissions.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissions.usersOrGroups.tabHeader.id=ID +dataverse.permissions.usersOrGroups.tabHeader.role=Role +dataverse.permissions.usersOrGroups.tabHeader.action=Action +dataverse.permissions.usersOrGroups.assignedAt=Role assigned at {0} +dataverse.permissions.usersOrGroups.removeBtn=Remove Assigned Role +dataverse.permissions.usersOrGroups.removeBtn.confirmation=Are you sure you want to remove this role assignment? +dataverse.permissions.roles=Roles +dataverse.permissions.roles.add=Add New Role +dataverse.permissions.roles.description=All the roles set up in your dataverse, that you can assign to users and groups. +dataverse.permissions.roles.edit=Edit Role +dataverse.permissions.roles.copy=Copy Role +dataverse.permissions.roles.alias.required=Please enter a unique identifier for this role. +dataverse.permissions.roles.name.required=Please enter a name for this role. + +# permissions-manage-files.xhtml +dataverse.permissionsFiles.title=Restricted File Permissions +dataverse.permissionsFiles.usersOrGroups=Users/Groups +dataverse.permissionsFiles.usersOrGroups.assignBtn=Grant Access to Users/Groups +dataverse.permissionsFiles.usersOrGroups.description=All the users and groups that have access to restricted files in this dataset. +dataverse.permissionsFiles.usersOrGroups.tabHeader.userOrGroup=User/Group Name (Affiliation) +dataverse.permissionsFiles.usersOrGroups.tabHeader.id=ID +dataverse.permissionsFiles.usersOrGroups.tabHeader.email=Email +dataverse.permissionsFiles.usersOrGroups.tabHeader.authentication=Authentication +dataverse.permissionsFiles.usersOrGroups.tabHeader.files=Files +dataverse.permissionsFiles.usersOrGroups.tabHeader.access=Access +dataverse.permissionsFiles.usersOrGroups.file=File +dataverse.permissionsFiles.usersOrGroups.files=Files +dataverse.permissionsFiles.usersOrGroups.invalidMsg=There are no users or groups with access to the restricted files in this dataset. +dataverse.permissionsFiles.files=Restricted Files +dataverse.permissionsFiles.files.label={0, choice, 0#Restricted Files|1#Restricted File|2#Restricted Files} +dataverse.permissionsFiles.files.description=All the restricted files in this dataset. 
+dataverse.permissionsFiles.files.tabHeader.fileName=File Name +dataverse.permissionsFiles.files.tabHeader.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.tabHeader.access=Access +dataverse.permissionsFiles.files.tabHeader.publishedRestrictedState=Published +dataverse.permissionsFiles.files.tabHeader.draftRestrictedState=Draft +dataverse.permissionsFiles.files.deleted=Deleted +dataverse.permissionsFiles.files.public=Public +dataverse.permissionsFiles.files.restricted=Restricted +dataverse.permissionsFiles.files.roleAssignee=User/Group +dataverse.permissionsFiles.files.roleAssignees=Users/Groups +dataverse.permissionsFiles.files.roleAssignees.label={0, choice, 0#Users/Groups|1#User/Group|2#Users/Groups} +dataverse.permissionsFiles.files.assignBtn=Assign Access +dataverse.permissionsFiles.files.invalidMsg=There are no restricted files in this dataset. +dataverse.permissionsFiles.files.requested=Requested Files +dataverse.permissionsFiles.files.selected=Selecting {0} of {1} {2} +dataverse.permissionsFiles.files.includeDeleted=Include Deleted Files +dataverse.permissionsFiles.viewRemoveDialog.header=File Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn=Remove Access +dataverse.permissionsFiles.viewRemoveDialog.removeBtn.confirmation=Are you sure you want to remove access to this file? Once access has been removed, the user or group will no longer be able to download this file. +dataverse.permissionsFiles.assignDialog.header=Grant File Access +dataverse.permissionsFiles.assignDialog.description=Grant file access to users and groups. +dataverse.permissionsFiles.assignDialog.userOrGroup=Users/Groups +dataverse.permissionsFiles.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissionsFiles.assignDialog.userOrGroup.invalidMsg=No matches found. +dataverse.permissionsFiles.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissionsFiles.assignDialog.fileName=File Name +dataverse.permissionsFiles.assignDialog.grantBtn=Grant +dataverse.permissionsFiles.assignDialog.rejectBtn=Reject + +# permissions-configure.xhtml +dataverse.permissions.accessDialog.header=Edit Access +dataverse.permissions.description=Current access configuration to your dataverse. +dataverse.permissions.tip=Select if all users or only certain users are able to add to this dataverse, by clicking the Edit Access button. +dataverse.permissions.Q1=Who can add to this dataverse? +dataverse.permissions.Q1.answer1=Anyone adding to this dataverse needs to be given access +dataverse.permissions.Q1.answer2=Anyone with a Dataverse account can add sub dataverses +dataverse.permissions.Q1.answer3=Anyone with a Dataverse account can add datasets +dataverse.permissions.Q1.answer4=Anyone with a Dataverse account can add sub dataverses and datasets +dataverse.permissions.Q2=When a user adds a new dataset to this dataverse, which role should be automatically assigned to them on that dataset? 
+dataverse.permissions.Q2.answer.editor.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, Submit datasets for review +dataverse.permissions.Q2.answer.manager.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use) +dataverse.permissions.Q2.answer.curator.description=- Edit metadata, upload files, and edit files, edit Terms, Guestbook, File Restrictions (Files Access + Use), Edit Permissions/Assign Roles + Publish +permission.anyoneWithAccount=Anyone with a Dataverse account + +# roles-assign.xhtml +dataverse.permissions.usersOrGroups.assignDialog.header=Assign Role +dataverse.permissions.usersOrGroups.assignDialog.description=Grant permissions to users and groups by assigning them a role. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup=Users/Groups +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.enterName=Enter User/Group Name +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.invalidMsg=No matches found. +dataverse.permissions.usersOrGroups.assignDialog.userOrGroup.requiredMsg=Please select at least one user or group. +dataverse.permissions.usersOrGroups.assignDialog.role.description=These are the permissions associated with the selected role. +dataverse.permissions.usersOrGroups.assignDialog.role.warning=Assigning the {0} role means the user(s) will also have the {0} role applied to all {1} within this {2}. +dataverse.permissions.usersOrGroups.assignDialog.role.requiredMsg=Please select a role to assign. + +# roles-edit.xhtml +dataverse.permissions.roles.header=Edit Role +dataverse.permissions.roles.name=Role Name +dataverse.permissions.roles.name.title=Enter a name for the role. +dataverse.permissions.roles.id=Identifier +dataverse.permissions.roles.id.title=Enter a name for the alias. +dataverse.permissions.roles.description.title=Describe the role (1000 characters max). +dataverse.permissions.roles.description.counter={0} characters remaining +dataverse.permissions.roles.roleList.header=Role Permissions +dataverse.permissions.roles.roleList.authorizedUserOnly=Permissions with an asterisk icon indicate actions that can be performed by users not logged into Dataverse. + +# explicitGroup-new-dialog.xhtml +dataverse.permissions.explicitGroupEditDialog.title.new=Create Group +dataverse.permissions.explicitGroupEditDialog.title.edit=Edit Group {0} +dataverse.permissions.explicitGroupEditDialog.help=Add users or other groups to this group. +dataverse.permissions.explicitGroupEditDialog.groupIdentifier=Group Identifier +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.tip=Short name used for the ID of this group. 
+dataverse.permissions.explicitGroupEditDialog.groupIdentifier.required=Group identifier cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.invalid=Group identifier can contain only letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.helpText=Consists of letters, digits, underscores (_) and dashes (-) +dataverse.permissions.explicitGroupEditDialog.groupIdentifier.taken=Group identifier already used in this dataverse +dataverse.permissions.explicitGroupEditDialog.groupName=Group Name +dataverse.permissions.explicitGroupEditDialog.groupName.required=Group name cannot be empty +dataverse.permissions.explicitGroupEditDialog.groupDescription=Description +dataverse.permissions.explicitGroupEditDialog.roleAssigneeName=User/Group +dataverse.permissions.explicitGroupEditDialog.roleAssigneeNames=Users/Groups +dataverse.permissions.explicitGroupEditDialog.createGroup=Create Group + +# manage-templates.xhtml +dataset.manageTemplates.pageTitle=Manage Dataset Templates +dataset.manageTemplates.select.txt=Include Templates from {0} +dataset.manageTemplates.createBtn=Create Dataset Template +dataset.manageTemplates.saveNewTerms=Save Dataset Template +dataset.manageTemplates.noTemplates.why.header=Why Use Templates? +dataset.manageTemplates.noTemplates.why.reason1=Templates are useful when you have several datasets that have the same information in multiple metadata fields that you would prefer not to have to keep manually typing in. +dataset.manageTemplates.noTemplates.why.reason2=Templates can be used to input instructions for those uploading datasets into your dataverse if you have a specific way you want a metadata field to be filled out. +dataset.manageTemplates.noTemplates.how.header=How To Use Templates +dataset.manageTemplates.noTemplates.how.tip1=Templates are created at the dataverse level, can be deleted (so it does not show for future datasets), set to default (not required), and can be copied so you do not have to start over when creating a new template with similar metadata from another template. When a template is deleted, it does not impact the datasets that have used the template already. +dataset.manageTemplates.noTemplates.how.tip2=Please note that the ability to choose which metadata fields are hidden, required, or optional is done on the General Information page for this dataverse. +dataset.manageTemplates.noTemplates.getStarted=To get started, click on the Create Dataset Template button above. To learn more about templates, visit the Dataset Templates section of the User Guide. +dataset.manageTemplates.tab.header.templte=Template Name +dataset.manageTemplates.tab.header.date=Date Created +dataset.manageTemplates.tab.header.usage=Usage +dataset.manageTemplates.tab.header.action=Action +dataset.manageTemplates.tab.action.btn.makeDefault=Make Default +dataset.manageTemplates.tab.action.btn.default=Default +dataset.manageTemplates.tab.action.btn.view=View +dataset.manageTemplates.tab.action.btn.copy=Copy +dataset.manageTemplates.tab.action.btn.edit=Edit +dataset.manageTemplates.tab.action.btn.edit.metadata=Metadata +dataset.manageTemplates.tab.action.btn.edit.terms=Terms +dataset.manageTemplates.tab.action.btn.delete=Delete +dataset.manageTemplates.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this template? A new dataset will not be able to use this template. 
+dataset.manageTemplates.tab.action.btn.delete.dialog.header=Delete Template +dataset.manageTemplates.tab.action.btn.view.dialog.header=Dataset Template Preview +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate=Dataset Template +dataset.manageTemplates.tab.action.btn.view.dialog.datasetTemplate.title=The dataset template which prepopulates info into the form automatically. +dataset.manageTemplates.tab.action.noedit.createdin=Template created at {0} +dataset.manageTemplates.delete.usedAsDefault=This template is the default template for the following dataverse(s). It will be removed as default as well. +dataset.message.manageTemplates.label=Manage Dataset Templates +dataset.message.manageTemplates.message=Create a template prefilled with metadata fields standard values, such as Author Affiliation, or add instructions in the metadata fields to give depositors more information on what metadata is expected. + +# metadataFragment.xhtml +dataset.anonymized.withheld=withheld + +# template.xhtml +dataset.template.name.tip=The name of the dataset template. +dataset.template.returnBtn=Return to Manage Templates +dataset.template.name.title=Enter a unique name for the template. +template.asterisk.tip=Asterisks indicate metadata fields that users will be required to fill out while adding a dataset to this dataverse. +dataset.template.popup.create.title=Create Template +dataset.template.popup.create.text=Do you want to add default Terms of Use and/or Access? +dataset.create.add.terms=Save + Add Terms + +# manage-groups.xhtml +dataverse.manageGroups.pageTitle=Manage Dataverse Groups +dataverse.manageGroups.createBtn=Create Group +dataverse.manageGroups.noGroups.why.header=Why Use Groups? +dataverse.manageGroups.noGroups.why.reason1=Groups allow you to assign roles and permissions for many users at once. +dataverse.manageGroups.noGroups.why.reason2=You can use groups to manage multiple different kinds of users (students, collaborators, etc.) +dataverse.manageGroups.noGroups.how.header=How To Use Groups +dataverse.manageGroups.noGroups.how.tip1=A group can contain both users and other groups. +dataverse.manageGroups.noGroups.how.tip2=You can assign permissions to a group in the "Permissions" view. +dataverse.manageGroups.noGroups.getStarted=To get started, click on the Create Group button above. +dataverse.manageGroups.tab.header.name=Group Name +dataverse.manageGroups.tab.header.id=Group ID +dataverse.manageGroups.tab.header.membership=Membership +dataverse.manageGroups.tab.header.action=Action +dataverse.manageGroups.tab.action.btn.view=View +dataverse.manageGroups.tab.action.btn.copy=Copy +dataverse.manageGroups.tab.action.btn.enable=Enable +dataverse.manageGroups.tab.action.btn.disable=Disable +dataverse.manageGroups.tab.action.btn.edit=Edit +dataverse.manageGroups.tab.action.btn.viewCollectedData=View Collected Data +dataverse.manageGroups.tab.action.btn.delete=Delete +dataverse.manageGroups.tab.action.btn.delete.dialog.header=Delete Group +dataverse.manageGroups.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this group? You cannot undelete a group. 
+dataverse.manageGroups.tab.action.btn.view.dialog.header=Dataverse Group +dataverse.manageGroups.tab.action.btn.view.dialog.group=Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.name=Member Name +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.type=Member Type +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.action=Action +dataverse.manageGroups.tab.action.btn.view.dialog.groupView.delete=Delete +dataverse.manageGroups.tab.action.btn.view.dialog.groupMembers=Group Members +dataverse.manageGroups.tab.action.btn.view.dialog.enterName=Enter User/Group Name +dataverse.manageGroups.tab.action.btn.view.dialog.invalidMsg=No matches found. + +# manage-guestbooks.xhtml +dataset.manageGuestbooks.pageTitle=Manage Dataset Guestbooks +dataset.manageGuestbooks.include=Include Guestbooks from {0} +dataset.manageGuestbooks.createBtn=Create Dataset Guestbook +dataset.manageGuestbooks.download.all.responses=Download All Responses +dataset.manageGuestbooks.download.responses=Download Responses +dataset.manageGuestbooks.noGuestbooks.why.header=Why Use Guestbooks? +dataset.manageGuestbooks.noGuestbooks.why.reason1=Guestbooks allow you to collect data about who is downloading the files from your datasets. You can decide to collect account information (username, given name & last name, affiliation, etc.) as well as create custom questions (e.g., What do you plan to use this data for?). +dataset.manageGuestbooks.noGuestbooks.why.reason2=You can download the data collected from the enabled guestbooks to be able to store it outside of Dataverse. +dataset.manageGuestbooks.noGuestbooks.how.header=How To Use Guestbooks +dataset.manageGuestbooks.noGuestbooks.how.tip1=A guestbook can be used for multiple datasets but only one guestbook can be used for a dataset. +dataset.manageGuestbooks.noGuestbooks.how.tip2=Custom questions can have free form text answers or have a user select an answer from several options. +dataset.manageGuestbooks.noGuestbooks.getStarted=To get started, click on the Create Dataset Guestbook button above. To learn more about Guestbooks, visit the Dataset Guestbook section of the User Guide. +dataset.manageGuestbooks.tab.header.name=Guestbook Name +dataset.manageGuestbooks.tab.header.date=Date Created +dataset.manageGuestbooks.tab.header.usage=Usage +dataset.manageGuestbooks.tab.header.responses=Responses +dataset.manageGuestbooks.tab.header.action=Action +dataset.manageGuestbooks.tab.action.btn.view=Preview +dataset.manageGuestbooks.tab.action.btn.copy=Copy +dataset.manageGuestbooks.tab.action.btn.enable=Enable +dataset.manageGuestbooks.tab.action.btn.disable=Disable +dataset.manageGuestbooks.tab.action.btn.edit=Edit +dataset.manageGuestbooks.tab.action.btn.preview=Preview +dataset.manageGuestbooks.tab.action.btn.viewCollectedData=View Responses +dataset.manageGuestbooks.tab.action.btn.delete=Delete +dataset.manageGuestbooks.tab.action.btn.delete.dialog.header=Delete Guestbook +dataset.manageGuestbooks.tab.action.btn.delete.dialog.tip=Are you sure you want to delete this guestbook? You cannot undelete a guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.header=Preview Guestbook +dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook.title=Upon downloading files the guestbook asks for the following information. 
+dataset.manageGuestbooks.tab.action.btn.view.dialog.datasetGuestbook=Guestbook Name +dataset.manageGuestbooks.tab.action.btn.viewCollectedData.dialog.header=Dataset Guestbook Collected Data +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData.title=User data collected by the guestbook. +dataset.manageGuestbooks.tab.action.btn.view.dialog.userCollectedData=Collected Data +dataset.manageGuestbooks.tab.action.noedit.createdin=Guestbook created at {0} +dataset.manageGuestbooks.message.deleteSuccess=The guestbook has been deleted. +dataset.manageGuestbooks.message.deleteFailure=The guestbook cannot be deleted. +dataset.manageGuestbooks.message.editSuccess=The guestbook has been updated. +dataset.manageGuestbooks.message.editFailure=The guestbook could not be updated. +dataset.manageGuestbooks.message.enableSuccess=The guestbook has been enabled. +dataset.manageGuestbooks.message.enableFailure=The guestbook could not be enabled. +dataset.manageGuestbooks.message.disableSuccess=The guestbook has been disabled. +dataset.manageGuestbooks.message.disableFailure=The guestbook could not be disabled. +dataset.manageGuestbooks.tip.title=Manage Dataset Guestbooks +dataset.manageGuestbooks.tip.downloadascsv=Click \"Download All Responses\" to download all collected guestbook responses for this dataverse, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.dataset=Dataset +dataset.guestbooksResponses.date=Date +dataset.guestbooksResponses.type=Type +dataset.guestbooksResponses.file=File +dataset.guestbooksResponses.customQuestions=Custom Questions +dataset.guestbooksResponses.user=User +dataset.guestbooksResponses.tip.title=Guestbook Responses +dataset.guestbooksResponses.count.responses={0} {0, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.count.toofresults={0} to {1} of {2} {2, choice, 0#Responses|1#Response|2#Responses} +dataset.guestbooksResponses.tip.downloadascsv=Click \"Download Responses\" to download all collected responses for this guestbook, as a CSV file. To navigate and analyze your collected responses, we recommend importing this CSV file into Excel, Google Sheets or similar software. +dataset.guestbooksResponses.tooManyResponses.message=Note: this guestbook has too many responses to display on this page. Only the most recent {0} responses are shown below. Click \"Download Responses\" to download all collected responses ({1} total) as a CSV file. + +# guestbook-responses.xhtml +dataset.guestbookResponses.pageTitle=Guestbook Responses + +# guestbook.xhtml +dataset.manageGuestbooks.guestbook.name=Guestbook Name +dataset.manageGuestbooks.guestbook.name.tip=Enter a unique name for this Guestbook. +dataset.manageGuestbooks.guestbook.dataCollected=Data Collected +dataset.manageGuestbooks.guestbook.dataCollected.description=Dataverse account information that will be collected when a user downloads a file. Check the ones that will be required. +dataset.manageGuestbooks.guestbook.customQuestions=Custom Questions +dataset.manageGuestbooks.guestbook.accountInformation=Account Information +dataset.manageGuestbooks.guestbook.required=(Required) +dataset.manageGuestbooks.guestbook.optional=(Optional) +dataset.manageGuestbooks.guestbook.customQuestions.description=Create your own questions to have users provide more than their account information when they download a file. 
Questions can be required or optional and answers can be text or multiple choice. +dataset.manageGuestbooks.guestbook.customQuestions.questionType=Question Type +dataset.manageGuestbooks.guestbook.customQuestions.questionText=Question Text +dataset.manageGuestbooks.guestbook.customQuestions.responseOptions=Response Options +dataset.manageGuestbooks.guestbook.customQuestions.questionType.text=Text +dataset.manageGuestbooks.guestbook.customQuestions.questionType.multiple=Multiple Choice + +# guestbookResponseFragment.xhtml +dataset.guestbookResponse.guestbook.additionalQuestions=Additional Questions +dataset.guestbookResponse.showPreview.errorMessage=Can't show preview. +dataset.guestbookResponse.showPreview.errorDetail=Couldn't write guestbook response. + +# dataset.xhtml +dataset.configureBtn=Configure +dataset.pageTitle=Add New Dataset + +dataset.accessBtn=Access Dataset +dataset.accessBtn.header.download=Download Options +dataset.accessBtn.header.explore=Explore Options +dataset.accessBtn.header.compute=Compute Options +dataset.accessBtn.download.size=ZIP ({0}) +dataset.accessBtn.too.big=The dataset is too large to download. Please select the files you need from the files table. +dataset.accessBtn.original.too.big=The dataset is too large to download in the original format. Please select the files you need from the files table. +dataset.accessBtn.archival.too.big=The dataset is too large to download in the archival format. Please select the files you need from the files table. +dataset.linkBtn=Link Dataset +dataset.contactBtn=Contact Owner +dataset.shareBtn=Share + +dataset.publishBtn=Publish Dataset +dataset.editBtn=Edit Dataset + +dataset.editBtn.itemLabel.upload=Files (Upload) +dataset.editBtn.itemLabel.metadata=Metadata +dataset.editBtn.itemLabel.terms=Terms +dataset.editBtn.itemLabel.permissions=Permissions +dataset.editBtn.itemLabel.thumbnailsAndWidgets=Thumbnails + Widgets +dataset.editBtn.itemLabel.privateUrl=Private URL +dataset.editBtn.itemLabel.permissionsDataset=Dataset +dataset.editBtn.itemLabel.permissionsFile=Restricted Files +dataset.editBtn.itemLabel.deleteDataset=Delete Dataset +dataset.editBtn.itemLabel.deleteDraft=Delete Draft Version +dataset.editBtn.itemLabel.deaccession=Deaccession Dataset +dataset.exportBtn=Export Metadata +dataset.exportBtn.itemLabel.ddi=DDI +dataset.exportBtn.itemLabel.dublinCore=Dublin Core +dataset.exportBtn.itemLabel.schemaDotOrg=Schema.org JSON-LD +dataset.exportBtn.itemLabel.datacite=DataCite +dataset.exportBtn.itemLabel.json=JSON +dataset.exportBtn.itemLabel.oai_ore=OAI_ORE +dataset.exportBtn.itemLabel.dataciteOpenAIRE=OpenAIRE +dataset.exportBtn.itemLabel.html=DDI HTML Codebook +metrics.title=Metrics +metrics.title.tip=View more metrics information +metrics.dataset.title=Dataset Metrics +metrics.dataset.tip.default=Aggregated metrics for this dataset. +metrics.dataset.tip.makedatacount=Metrics collected using Make Data Count standards. +metrics.dataset.views.tip=Dataset views are combined with both aggregated file views and file downloads. +metrics.dataset.downloads.default.tip=Total aggregated downloads of files in this dataset. +metrics.dataset.downloads.makedatacount.tip=Each file downloaded is counted as 1, and added to the total download count. +metrics.dataset.citations.tip=Click for a list of citation URLs. +metrics.file.title=File Metrics +metrics.file.tip.default=Metrics for this individual file. 
+metrics.file.tip.makedatacount=Individual file downloads are tracked in Dataverse but are not reported as part of the Make Data Count standard.
+metrics.file.downloads.tip=Total downloads of this file.
+metrics.views={0, choice, 0#Views|1#View|2#Views}
+metrics.downloads={0, choice, 0#Downloads|1#Download|2#Downloads}
+metrics.citations={0, choice, 0#Citations|1#Citation|2#Citations}
+metrics.citations.dialog.header=Dataset Citations
+metrics.citations.dialog.help=Citations for this dataset are retrieved from Crossref via DataCite using Make Data Count standards. For more information about dataset metrics, please refer to the User Guide.
+metrics.citations.dialog.empty=Sorry, no citations were found.
+dataset.publish.btn=Publish
+dataset.publish.header=Publish Dataset
+dataset.rejectBtn=Return to Author
+dataset.submitBtn=Submit for Review
+dataset.disabledSubmittedBtn=Submitted for Review
+dataset.submitMessage=You will not be able to make changes to this dataset while it is in review.
+dataset.submit.success=Your dataset has been submitted for review.
+dataset.inreview.infoMessage=The draft version of this dataset is currently under review prior to publication.
+dataset.submit.failure=Dataset Submission Failed - {0}
+dataset.submit.failure.null=Can't submit for review. Dataset is null.
+dataset.submit.failure.isReleased=Latest version of dataset is already released. Only draft versions can be submitted for review.
+dataset.submit.failure.inReview=You cannot submit this dataset for review because it is already in review.
+dataset.rejectMessage=Return this dataset to contributor for modification.
+dataset.rejectMessage.label=Return to Author Reason
+dataset.rejectWatermark=Please enter a reason for returning this dataset to its author(s).
+dataset.reject.enterReason.error=Reason for return to author is required.
+dataset.reject.success=This dataset has been sent back to the contributor.
+dataset.reject.failure=Dataset Submission Return Failed - {0}
+dataset.reject.datasetNull=Cannot return the dataset to the author(s) because it is null.
+dataset.reject.datasetNotInReview=This dataset cannot be returned to the author(s) because the latest version is not In Review. The author(s) needs to click Submit for Review first.
+dataset.publish.tip=Are you sure you want to publish this dataset? Once you do so it must remain published.
+dataset.publishBoth.tip=Once you publish this dataset it must remain published.
+dataset.unregistered.tip= This dataset is unregistered. We will attempt to register it before publishing.
+dataset.republish.tip=Are you sure you want to republish this dataset?
+dataset.selectVersionNumber=Select if this is a minor or major version update.
+dataset.updateRelease=Update Current Version (will permanently overwrite the latest published version)
+dataset.majorRelease=Major Release
+dataset.minorRelease=Minor Release
+dataset.majorRelease.tip=Due to the nature of changes to the current draft this will be a major release ({0})
+dataset.mayNotBePublished=Cannot publish dataset.
+dataset.mayNotPublish.administrator= This dataset cannot be published until {0} is published by its administrator.
+dataset.mayNotPublish.both= This dataset cannot be published until {0} is published. Would you like to publish both right now?
+dataset.mayNotPublish.twoGenerations= This dataset cannot be published until {0} and {1} are published.
+dataset.mayNotBePublished.both.button=Yes, Publish Both +dataset.viewVersion.unpublished=View Unpublished Version +dataset.viewVersion.published=View Published Version +dataset.link.title=Link Dataset +dataset.link.save=Save Linked Dataset +dataset.link.not.to.owner=Can't link a dataset to its dataverse +dataset.link.not.to.parent.dataverse=Can't link a dataset to its parent dataverses +dataset.link.not.published=Can't link a dataset that has not been published +dataset.link.not.available=Can't link a dataset that has not been published or is not harvested +dataset.link.not.already.linked=Can't link a dataset that has already been linked to this dataverse +dataset.email.datasetContactTitle=Contact Dataset Owner +dataset.email.hiddenMessage= +dataset.email.messageSubject=Test Message Subject +dataset.email.datasetLinkBtn.tip=Link Dataset to Your Dataverse +dataset.share.datasetShare=Share Dataset +dataset.share.datasetShare.tip=Share this dataset on your favorite social media networks. +dataset.share.datasetShare.shareText=View this dataset. +dataset.locked.message=Dataset Locked +dataset.locked.message.details=This dataset is locked until publication. +dataset.locked.inReview.message=Submitted for Review +dataset.locked.ingest.message=The tabular data files uploaded are being processed and converted into the archival format +dataset.unlocked.ingest.message=The tabular files have been ingested. +dataset.locked.editInProgress.message=Edit In Progress +dataset.locked.editInProgress.message.details=Additional edits cannot be made at this time. Contact {0} if this status persists. +dataset.locked.pidNotReserved.message=Dataset DOI Not Reserved +dataset.locked.pidNotReserved.message.details=The DOI displayed in the citation for this dataset has not yet been reserved with DataCite. Please do not share this DOI until it has been reserved. +dataset.publish.error=This dataset may not be published due to an error when contacting the {0} Service. Please try again. +dataset.publish.error.doi=This dataset may not be published because the DOI update failed. +dataset.publish.file.validation.error.message=Failed to Publish Dataset +dataset.publish.file.validation.error.details=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.) +dataset.publish.file.validation.error.contactSupport=The dataset could not be published because one or more of the datafiles in the dataset could not be validated (physical file missing, checksum mismatch, etc.) Please contact support for further assistance. +dataset.publish.file.validation.error.noChecksumType=Checksum type not defined for datafile id {0} +dataset.publish.file.validation.error.failRead=Failed to open datafile id {0} for reading +dataset.publish.file.validation.error.failCalculateChecksum=Failed to calculate checksum for datafile id {0} +dataset.publish.file.validation.error.wrongChecksumValue=Checksum mismatch for datafile id {0} +dataset.compute.computeBatchSingle=Compute Dataset +dataset.compute.computeBatchList=List Batch +dataset.compute.computeBatchAdd=Add to Batch +dataset.compute.computeBatchClear=Clear Batch +dataset.compute.computeBatchRemove=Remove from Batch +dataset.compute.computeBatchCompute=Compute Batch +dataset.compute.computeBatch.success=The list of datasets in your compute batch has been updated. +dataset.compute.computeBatch.failure=The list of datasets in your compute batch failed to be updated. Please try again. 
+dataset.compute.computeBtn=Compute +dataset.compute.computeBatchListHeader=Compute Batch +dataset.compute.computeBatchRestricted=This dataset contains restricted files you may not compute on because you have not been granted access. +dataset.delete.error=Could not deaccession the dataset because the {0} update failed. +dataset.publish.workflow.message=Publish in Progress +dataset.publish.workflow.inprogress=This dataset is locked until publication. +dataset.pidRegister.workflow.inprogress=The dataset is locked while the persistent identifiers are being registered or updated, and/or the physical files are being validated. +dataset.versionUI.draft=Draft +dataset.versionUI.inReview=In Review +dataset.versionUI.unpublished=Unpublished +dataset.versionUI.deaccessioned=Deaccessioned +dataset.cite.title.released=DRAFT VERSION will be replaced in the citation with V1 once the dataset has been published. +dataset.cite.title.draft=DRAFT VERSION will be replaced in the citation with the selected version once the dataset has been published. +dataset.cite.title.deassessioned=DEACCESSIONED VERSION has been added to the citation for this version since it is no longer available. +dataset.cite.standards.tip=Learn about Data Citation Standards. +dataset.cite.downloadBtn=Cite Dataset +dataset.cite.downloadBtn.xml=EndNote XML +dataset.cite.downloadBtn.ris=RIS +dataset.cite.downloadBtn.bib=BibTeX +dataset.create.authenticatedUsersOnly=Only authenticated users can create datasets. +dataset.deaccession.reason=Deaccession Reason +dataset.beAccessedAt=The dataset can now be accessed at: +dataset.descriptionDisplay.title=Description +dataset.keywordDisplay.title=Keyword +dataset.subjectDisplay.title=Subject +dataset.contact.tip=Use email button above to contact. +dataset.asterisk.tip=Asterisks indicate required fields +dataset.message.uploadFiles.label=Upload Dataset Files +dataset.message.uploadFilesSingle.message=For more information about supported file formats, please refer to the User Guide. +dataset.message.uploadFilesMultiple.message=Multiple file upload/download methods are available for this dataset. Once you upload a file using one of these methods, your choice will be locked in for this dataset. +dataset.message.editMetadata.label=Edit Dataset Metadata +dataset.message.editMetadata.message=Add more metadata about this dataset to help others easily find it. +dataset.message.editMetadata.duplicateFilenames=Duplicate filenames: {0} +dataset.message.editTerms.label=Edit Dataset Terms +dataset.message.editTerms.message=Add the terms of use for this dataset to explain how to access and use your data. +dataset.message.locked.editNotAllowedInReview=Dataset cannot be edited due to In Review dataset lock. +dataset.message.locked.downloadNotAllowedInReview=Dataset file(s) may not be downloaded due to In Review dataset lock. +dataset.message.locked.downloadNotAllowed=Dataset file(s) may not be downloaded due to dataset lock. +dataset.message.locked.editNotAllowed=Dataset cannot be edited due to dataset lock. +dataset.message.locked.publishNotAllowed=Dataset cannot be published due to dataset lock. +dataset.message.createSuccess=This dataset has been created. +dataset.message.createSuccess.failedToSaveFiles=Partial Success: The dataset has been created. But the file(s) could not be saved. Please try uploading the file(s) again. +dataset.message.createSuccess.partialSuccessSavingFiles=Partial Success: The dataset has been created. But only {0} out of {1} files have been saved. 
Please try uploading the missing file(s) again. +dataset.message.linkSuccess= {0} has been successfully linked to {1}. +dataset.message.metadataSuccess=The metadata for this dataset has been updated. +dataset.message.termsSuccess=The terms for this dataset have been updated. +dataset.message.filesSuccess=The files for this dataset have been updated. +dataset.message.addFiles.Failure=Failed to add files to the dataset. Please try uploading the file(s) again. +dataset.message.addFiles.partialSuccess=Partial success: only {0} files out of {1} have been saved. Please try uploading the missing file(s) again. +dataset.message.publish.remind.draft=If it's ready for sharing, please publish it. +dataset.message.submit.remind.draft=If it's ready for sharing, please submit it for review. +dataset.message.publish.remind.version=If it's ready for sharing, please publish it so that others can see these changes. +dataset.message.submit.remind.version=If it's ready for sharing, please submit it for review so that others can see these changes. +dataset.message.publishSuccess=This dataset has been published. +dataset.message.only.authenticatedUsers=Only authenticated users may release Datasets. +dataset.message.deleteSuccess=This dataset has been deleted. +dataset.message.bulkFileUpdateSuccess=The selected files have been updated. +dataset.message.bulkFileDeleteSuccess=The selected files have been deleted. +datasetVersion.message.deleteSuccess=This dataset draft has been deleted. +datasetVersion.message.deaccessionSuccess=The selected version(s) have been deaccessioned. +dataset.message.deaccessionSuccess=This dataset has been deaccessioned. +dataset.message.publishFailure=The dataset could not be published. +dataset.message.metadataFailure=The metadata could not be updated. +dataset.message.filesFailure=The files could not be updated. +dataset.message.bulkFileDeleteFailure=The selected files could not be deleted. +dataset.message.files.ingestFailure=The file(s) could not be ingested. +dataset.message.deleteFailure=This dataset draft could not be deleted. +dataset.message.deaccessionFailure=This dataset could not be deaccessioned. +dataset.message.createFailure=The dataset could not be created. +dataset.message.termsFailure=The dataset terms could not be updated. +dataset.message.label.fileAccess=File Access +dataset.message.publicInstall=Files are stored on a publicly accessible storage server. +dataset.metadata.publicationDate=Publication Date +dataset.metadata.publicationDate.tip=The publication date of a dataset. +dataset.metadata.publicationYear=Publication Year +dataset.metadata.publicationYear.tip=The publication year of a dataset. +dataset.metadata.persistentId=Dataset Persistent ID +dataset.metadata.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. +dataset.metadata.alternativePersistentId=Previous Dataset Persistent ID +dataset.metadata.alternativePersistentId.tip=A previously used persistent identifier for a dataset, which can be a Handle or DOI in Dataverse. +file.metadata.preview=Preview +file.metadata.filetags=File Tags +file.metadata.persistentId=File Persistent ID +file.metadata.persistentId.tip=The unique persistent identifier for a file, which can be a Handle or DOI in Dataverse. 
+dataset.versionDifferences.termsOfUseAccess=Terms of Use and Access +dataset.versionDifferences.termsOfUseAccessChanged=Terms of Use/Access Changed +dataset.versionDifferences.metadataBlock=Metadata Block +dataset.versionDifferences.field=Field +dataset.versionDifferences.changed=Changed +dataset.versionDifferences.from=From +dataset.versionDifferences.to=To +file.viewDiffDialog.fileAccess=Access +dataset.host.tip=Changing the host dataverse will clear any fields you may have entered data into. +dataset.template.tip=Changing the template will clear any fields you may have entered data into. +dataset.noTemplate.label=None +dataset.noSelectedFiles.header=Select File(s) +dataset.noSelectedFiles=Please select one or more files. +dataset.noSelectedFilesForDownload=Please select a file or files to be downloaded. +dataset.noSelectedFilesForRequestAccess=Please select a file or files for access request. +dataset.inValidSelectedFilesForDownload=Restricted Files Selected +dataset.noValidSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.mixedSelectedFilesForDownload=The restricted file(s) selected may not be downloaded because you have not been granted access. +dataset.downloadUnrestricted=Click Continue to download the files you have access to download. + +dataset.requestAccessToRestrictedFiles=You may request access to the restricted file(s) by clicking the Request Access button. +dataset.privateurl.infoMessageAuthor=Privately share this dataset before it is published: {0} +dataset.privateurl.infoMessageReviewer=This unpublished dataset is being privately shared. You will not be able to access it when logged into your Dataverse account. +dataset.privateurl.header=Unpublished Dataset Private URL +dataset.privateurl.tip=Use a Private URL to allow those without Dataverse accounts to access your unpublished dataset. For more information about the Private URL feature, please refer to the User Guide. +dataset.privateurl.absent=Private URL has not been created. +dataset.privateurl.createPrivateUrl=Create Private URL +dataset.privateurl.createPrivateUrl.anonymized=Create URL for Anonymized Access +dataset.privateurl.createPrivateUrl.anonymized.unavailable=Anonymized Access is not available once a version of the dataset has been published +dataset.privateurl.disablePrivateUrl=Disable Private URL +dataset.privateurl.disablePrivateUrlConfirm=Yes, Disable Private URL +dataset.privateurl.disableConfirmationText=Are you sure you want to disable the Private URL? If you have shared the Private URL with others they will no longer be able to use it to access your unpublished dataset. +dataset.privateurl.cannotCreate=Private URL can only be used with unpublished versions of datasets. +dataset.privateurl.roleassigeeTitle=Private URL Enabled +dataset.privateurl.createdSuccess=Success! +dataset.privateurl.full=This Private URL provides full read access to the dataset +dataset.privateurl.anonymized=This Private URL provides access to the anonymized dataset +dataset.privateurl.disabledSuccess=You have successfully disabled the Private URL for this unpublished dataset. +dataset.privateurl.noPermToCreate=To create a Private URL you must have the following permissions: {0}. 
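
Editor's note: the entries above are ordinary Java properties whose {0}, {1}, ... markers are java.text.MessageFormat placeholders that Dataverse fills in at render time. The following is a minimal, illustrative sketch of how such a value is resolved; the bundle base name "Bundle" and the permission string passed in are assumptions for the example, not values taken from this repository.

    import java.text.MessageFormat;
    import java.util.ResourceBundle;

    public class BundleLookupDemo {
        public static void main(String[] args) {
            // Assumes a Bundle.properties file containing the entries above is on the classpath.
            ResourceBundle bundle = ResourceBundle.getBundle("Bundle");

            // e.g. "To create a Private URL you must have the following permissions: {0}."
            String pattern = bundle.getString("dataset.privateurl.noPermToCreate");

            // MessageFormat substitutes the {0} placeholder with the supplied argument.
            System.out.println(MessageFormat.format(pattern, "ManageDatasetPermissions"));
        }
    }
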
+file.display.label=Change View +file.display.table=Table +file.display.tree=Tree +file.count.label=File Count +file.count.one=1 File +file.count={0} to {1} of {2} {2, choice, 0#Files|1#File|2#Files} +file.count.shown={0} {0, choice, 0#Files Selected|1#File|2#Files} +file.clearSelection=Clear selection. +file.zip.download.exceeds.limit=The overall size of the files selected ({0}) for download exceeds the zip limit of {1}. Please unselect some files to continue. +file.zip.download.exceeds.limit.info=The files selected are too large to download as a ZIP. +file.zip.download.exceeds.limit.detail=You can select individual files that are below the {2} download limit from the files table, or use the Data Access API for programmatic access to the files. +file.zip.download.exceeds.limit.header=Download Options +file.numFilesSelected={0} {0, choice, 0#files are|1#file is|2#files are} currently selected. +file.select.tooltip=Select Files +file.selectAllFiles=Select all {0} files in this dataset. +file.dynamicCounter.filesPerPage=Files Per Page +file.selectToAddBtn=Select Files to Add +file.selectToAdd.tipLimit=File upload limit is {0} per file. +file.selectToAdd.tipMoreInformation=Select files or drag and drop into the upload widget. +file.selectToAdd.dragdropMsg=Drag and drop files here. +file.createUploadDisabled=Upload files using rsync via SSH. This method is recommended for large file transfers. The upload script will be available on the Upload Files page once you save this dataset. +file.fromHTTP=Upload with HTTP via your browser +file.fromDropbox=Upload from Dropbox +file.fromDropbox.tip=Select files from Dropbox. +file.fromRsync=Upload with rsync + SSH via Data Capture Module (DCM) +file.api.httpDisabled=File upload via HTTP is not available for this installation of Dataverse. +file.api.alreadyHasPackageFile=File upload via HTTP disabled since this dataset already contains a package file. +file.replace.original=Original File +file.editFiles=Edit Files +file.editFilesSelected=Edit +file.editFile=Edit + +file.actionsBlock=File Actions +file.accessBtn=Access File +file.accessBtn.header.download=Download Options +file.optionsBtn=File Options +file.optionsBtn.header.edit=Edit Options +file.optionsBtn.header.configure=Configure Options +file.editBtn=Edit File +file.contactBtn=Contact Owner +file.shareBtn=Share +file.share.title=Share File +file.share.tip=Share this file on your favorite social media networks. +file.share.text=View this file. +file.bulkUpdate=Bulk Update +file.uploadFiles=Upload Files +file.replaceFile=Replace File +file.notFound.tip=There are no files in this dataset. +file.notFound.search=There are no files that match your search. Please change the search terms and try again. +file.noSelectedFiles.tip=There are no selected files to display. +file.noUploadedFiles.tip=Files you upload will appear here. +file.replace=Replace +file.alreadyDeleted.warning.header=Edit File +file.alreadyDeleted.previous.warningMessage=This file has already been deleted (or replaced) in the current version. It may not be edited. +file.delete=Delete +file.delete.duplicate.multiple=Delete Duplicate Files +file.delete.duplicate.single=Delete Duplicate File +file.metadata=Metadata +file.deleted.success=Files "{0}" will be permanently deleted from this version of this dataset once you click on the Save Changes button. +file.deleted.replacement.success=The replacement file has been deleted. +file.deleted.upload.success.single=File has been deleted and won\u2019t be included in this upload. 
+file.deleted.upload.success.multiple=Files have been deleted and won\u2019t be included in this upload. +file.editAccess=Edit Access +file.restrict=Restrict +file.unrestrict=Unrestrict +file.restricted.success=Files "{0}" will be restricted once you click on the Save Changes button. +file.download.header=Download +file.download.subset.header=Download Data Subset +file.preview=Preview: +file.fileName=File Name +file.type.tabularData=Tabular Data +file.originalChecksumType=Original File {0} +file.checksum.exists.tip=A file with this checksum already exists in the dataset. +file.selectedThumbnail=Thumbnail +file.selectedThumbnail.tip=The thumbnail for this file is used as the default thumbnail for the dataset. Click 'Advanced Options' button of another file to select that file. +file.cloudStorageAccess=Cloud Storage Access +file.cloudStorageAccess.tip=The container name for this dataset needed to access files in cloud storage. +file.cloudStorageAccess.help=To directly access this data in the {2} cloud environment, use the container name in the Cloud Storage Access box below. To learn more about the cloud environment, visit the Cloud Storage Access section of the User Guide. +file.copy=Copy +file.compute=Compute +file.rsyncUpload.info=Upload files using rsync + SSH. This method is recommended for large file transfers. Follow the steps below to upload your data. (User Guide - rsync Upload). +file.rsyncUpload.filesExist=You cannot upload additional files to this dataset. A dataset can only hold one data package. If you need to replace the data package in this dataset, please contact {0}. +file.rsyncUpload.noScriptBroken=The Data Capture Module failed to generate the rsync script. Please contact {0}. +file.rsyncUpload.noScriptBusy=Currently generating rsync script. If the script takes longer than ten minutes to generate, please contact {0}. +file.rsyncUpload.step1=Make sure your data is stored under a single directory. All files within this directory and its subdirectories will be uploaded to your dataset. +file.rsyncUpload.step2=Download this file upload script: +file.rsyncUpload.step2.downloadScriptButton=Download DCM Script +file.rsyncUpload.step3=Open a terminal window in the same directory you saved the script and run this command: bash ./{0} +file.rsyncUpload.step4=Follow the instructions in the script. It will ask for a full path (beginning with "/") to the directory containing your data. Note: this script will expire after 7 days. +file.rsyncUpload.inProgressMessage.summary=File Upload in Progress +file.rsyncUpload.inProgressMessage.details=This dataset is locked while the data files are being transferred and verified. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExisting=HTTP upload is disabled for this dataset because you have already uploaded files via rsync. If you would like to switch to HTTP upload, please contact {0}. +file.rsyncUpload.httpUploadDisabledDueToRsyncFileExistingAndPublished=HTTP upload is disabled for this dataset because you have already uploaded files via rsync and published the dataset. +file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttp=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP. If you would like to switch to rsync upload, then you must first remove all uploaded files from this dataset. Once this dataset is published, the chosen upload method is permanently locked in. 
+file.rsyncUpload.rsyncUploadDisabledDueFileUploadedViaHttpAndPublished=Upload with rsync + SSH is disabled for this dataset because you have already uploaded files via HTTP and published the dataset. +file.metaData.checksum.copy=Click to copy +file.metaData.dataFile.dataTab.unf=UNF +file.metaData.dataFile.dataTab.variables=Variables +file.metaData.dataFile.dataTab.observations=Observations +file.metaData.fileAccess=File Access: +file.addDescription=Add file description... +file.tags=Tags +file.editTags=Edit Tags +file.editTagsDialog.tip=Select existing file tags or create new tags to describe your files. Each file can have more than one tag. +file.editTagsDialog.select=File Tags +file.editTagsDialog.selectedTags=Selected Tags +file.editTagsDialog.selectedTags.none=No tags selected +file.editTagsDialog.add=Custom File Tag +file.editTagsDialog.add.tip=Creating a new tag will add it as a tag option for all files in this dataset. +file.editTagsDialog.newName=Add new file tag... +dataset.removeUnusedFileTags.label=Delete Tags +dataset.removeUnusedFileTags.tip=Select to delete Custom File Tags not used by the files in the dataset. +dataset.removeUnusedFileTags.check=Delete tags not being used +file.setThumbnail=Set Thumbnail +file.setThumbnail.header=Set Dataset Thumbnail +file.datasetThumbnail=Dataset Thumbnail +file.datasetThumbnail.tip=Select to use this image as the thumbnail image that is displayed in the search results for this dataset. +file.setThumbnail.confirmation=Are you sure you want to set this image as your dataset thumbnail? There is already an image uploaded to be the thumbnail and this action will remove it. +file.useThisIamge=Use this image as the dataset thumbnail image +file.advancedOptions=Advanced Options +file.advancedIngestOptions=Advanced Ingest Options +file.assignedDataverseImage.success={0} has been saved as the thumbnail for this dataset. +file.assignedTabFileTags.success=The tags were successfully added for {0}. +file.tabularDataTags=Tabular Data Tags +file.tabularDataTags.tip=Select a tag to describe the type(s) of data this is (survey, time series, geospatial, etc). +file.spss-savEncoding=Language Encoding +file.spss-savEncoding.title=Select the language used for encoding this SPSS (sav) Data file. +file.spss-savEncoding.current=Current Selection: +file.spss-porExtraLabels=Variable Labels +file.spss-porExtraLabels.title=Upload an additional text file with extra variable labels. +file.spss-porExtraLabels.selectToAddBtn=Select File to Add +file.ingestFailed.header=Upload Completed with Errors +file.ingestFailed.message=Tabular data ingest failed. +file.downloadBtn.format.all=All File Formats + Information +file.downloadBtn.format.tab=Tab-Delimited +file.downloadBtn.format.original={0} (Original File Format) +file.downloadBtn.format.rdata=RData +file.downloadBtn.format.var=Variable Metadata +file.downloadBtn.format.citation=Data File Citation +file.download.filetype.unknown=Original File Format +file.more.information.link=Link to more file information for +file.requestAccess=Request Access +file.requestAccess.dialog.msg=You need to Log In to request access. +file.requestAccess.dialog.msg.signup=You need to Sign Up or Log In to request access. +file.accessRequested=Access Requested +file.ingestInProgress=Ingest in progress... 
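
Editor's note on the escaping used in the values above: quotes are written as \" and apostrophes (for example in file.deleted.upload.success.single) are written as the Unicode escape \u2019. java.util.Properties resolves \uXXXX escapes when the file is loaded and simply drops the backslash in front of a quote. Using the typographic \u2019 (or a doubled '') also sidesteps a MessageFormat pitfall, because a bare ASCII apostrophe starts a quoted literal and suppresses placeholder substitution. A small standalone illustration, not Dataverse code:

    import java.text.MessageFormat;

    public class ApostropheDemo {
        public static void main(String[] args) {
            // A bare ASCII apostrophe starts a quoted literal, so {0} is not substituted:
            System.out.println(MessageFormat.format("Can't open file {0}", "data.tab"));
            // prints: Cant open file {0}

            // A doubled apostrophe is the MessageFormat escape, so substitution works:
            System.out.println(MessageFormat.format("Can''t open file {0}", "data.tab"));
            // prints: Can't open file data.tab

            // The typographic apostrophe (U+2019) is not special to MessageFormat at all:
            System.out.println(MessageFormat.format("Won\u2019t open file {0}", "data.tab"));
            // prints: Won't open file data.tab (with a curly apostrophe)
        }
    }
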
+file.dataFilesTab.metadata.header=Metadata
+file.dataFilesTab.metadata.addBtn=Add + Edit Metadata
+file.dataFilesTab.terms.header=Terms
+file.dataFilesTab.terms.editTermsBtn=Edit Terms Requirements
+file.dataFilesTab.terms.list.termsOfUse.header=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.waiver=Waiver
+file.dataFilesTab.terms.list.termsOfUse.waiver.title=The waiver informs data downloaders how they can use this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt=CC0 - "Public Domain Dedication"
+file.cc0.icon.alttxt=Creative Commons CC0 1.0 Public Domain Dedication icon
+file.dataFilesTab.terms.list.termsOfUse.waiver.description=Datasets will default to a CC0 public domain dedication. CC0 facilitates reuse and extensibility of research data. Our Community Norms as well as good scientific practices expect that proper credit is given via citation. If you are unable to give datasets a CC0 waiver you may enter custom Terms of Use for datasets.
+file.dataFilesTab.terms.list.termsOfUse.no.waiver.txt=No waiver has been selected for this dataset.
+file.dataFilesTab.terms.list.termsOfUse.waiver.txt.description=Our Community Norms as well as good scientific practices expect that proper credit is given via citation. Please use the data citation above, generated by the Dataverse.
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.CCO=Yes, apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.notCCO=No, do not apply CC0 - "Public Domain Dedication"
+file.dataFilesTab.terms.list.termsOfUse.waiver.select.tip=This is what end users will see displayed on this dataset
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse=Terms of Use
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.title=Outlines how this data can be used once downloaded.
+file.dataFilesTab.terms.list.termsOfUse.termsOfUse.description=If you are unable to use CC0 for datasets you are able to set custom terms of use. Here is an example of a Data Usage Agreement for datasets that have de-identified human subject data.
+file.dataFilesTab.terms.list.termsOfUse.addInfo=Additional Information
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration=Confidentiality Declaration
+file.dataFilesTab.terms.list.termsOfUse.addInfo.declaration.title=Indicates whether signing of a confidentiality declaration is needed to access a resource.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions=Special Permissions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.permissions.title=Determine if any special permissions are required to access a resource (e.g., if a form is needed and where to access the form).
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions=Restrictions
+file.dataFilesTab.terms.list.termsOfUse.addInfo.restrictions.title=Any restrictions on access to or use of the collection, such as privacy certification or distribution restrictions, should be indicated here. These can be restrictions applied by the author, producer, or disseminator of the data collection. If the data are restricted to only a certain class of user, specify which type.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements=Citation Requirements
+file.dataFilesTab.terms.list.termsOfUse.addInfo.citationRequirements.title=Include special/explicit citation requirements for data to be cited properly in articles or other publications that are based on analysis of the data. For standard data citation requirements refer to our Community Norms.
+file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements=Depositor Requirements +file.dataFilesTab.terms.list.termsOfUse.addInfo.depositorRequirements.title=Information regarding user responsibility for informing Dataset Depositors, Authors or Curators of their use of data through providing citations to the published work or providing copies of the manuscripts. +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions=Conditions +file.dataFilesTab.terms.list.termsOfUse.addInfo.conditions.title=Any additional information that will assist the user in understanding the access and use conditions of the Dataset. +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer=Disclaimer +file.dataFilesTab.terms.list.termsOfUse.addInfo.disclaimer.title=Information regarding responsibility for uses of the Dataset. +file.dataFilesTab.terms.list.termsOfAccess.header=Restricted Files + Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles=Restricted Files +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.title=The number of restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.restrictedFiles.txt=There {0, choice, 0#are|1#is|2#are} {0} restricted {0, choice, 0#files|1#file|2#files} in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess=Terms of Access +file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title=Information on how and if users can gain access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess=Request Access +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title=If checked, users can request access to the restricted files in this dataset. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.request=Users may request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.notRequest=Users may not request access to files. +file.dataFilesTab.terms.list.termsOfAccess.requestAccess.enableBtn=Enable access request +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace=Data Access Place +file.dataFilesTab.terms.list.termsOfAccess.addInfo.dataAccessPlace.title=If the data is not only in Dataverse, list the location(s) where the data are currently stored. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive=Original Archive +file.dataFilesTab.terms.list.termsOfAccess.addInfo.originalArchive.title=Archive from which the data was obtained. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus=Availability Status +file.dataFilesTab.terms.list.termsOfAccess.addInfo.availabilityStatus.title=Statement of Dataset availability. A depositor may need to indicate that a Dataset is unavailable because it is embargoed for a period of time, because it has been superseded, because a new edition is imminent, etc. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess=Contact for Access +file.dataFilesTab.terms.list.termsOfAccess.addInfo.contactForAccess.title=If different from the Dataset Contact, this is the Contact person or organization (include email or full address, and telephone number if available) that controls access to a collection. 
+file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection=Size of Collection +file.dataFilesTab.terms.list.termsOfAccess.addInfo.sizeOfCollection.tip=Summary of the number of physical files that exist in a Dataset, recording the number of files that contain data and noting whether the collection contains machine readable documentation and/or other supplementary files and information, such as code, data dictionaries, data definition statements, or data collection instruments. +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion=Study Completion +file.dataFilesTab.terms.list.termsOfAccess.addInfo.studyCompletion.title=Relationship of the data collected to the amount of data coded and stored in the Dataset. Information as to why certain items of collected information were not included in the dataset or a specific data file should be provided. +file.dataFilesTab.terms.list.guestbook=Guestbook +file.dataFilesTab.terms.list.guestbook.title=User information (i.e., name, email, institution, and position) will be collected when files are downloaded. +file.dataFilesTab.terms.list.guestbook.noSelected.tip=No guestbook is assigned to this dataset, you will not be prompted to provide any information on file download. +file.dataFilesTab.terms.list.guestbook.noSelected.admin.tip=There are no guestbooks available in {0} to assign to this dataset. +file.dataFilesTab.terms.list.guestbook.inUse.tip=The following guestbook will prompt a user to provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.viewBtn=Preview Guestbook +file.dataFilesTab.terms.list.guestbook.select.tip=Select a guestbook to have a user provide additional information when downloading a file. +file.dataFilesTab.terms.list.guestbook.noAvailable.tip=There are no guestbooks enabled in {0}. To create a guestbook, return to {0}, click the "Edit" button and select the "Dataset Guestbooks" option. +file.dataFilesTab.terms.list.guestbook.clearBtn=Clear Selection + +file.dataFilesTab.dataAccess=Data Access +file.dataFilesTab.dataAccess.info=This data file can be accessed through a terminal window, using the commands below. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.info.draft=Data files can not be accessed until the dataset draft has been published. For more information about downloading and verifying data, see our User Guide. +file.dataFilesTab.dataAccess.local.label=Local Access +file.dataFilesTab.dataAccess.download.label=Download Access +file.dataFilesTab.dataAccess.verify.label=Verify Data +file.dataFilesTab.dataAccess.local.tooltip=If this data is locally available to you, this is its file path. +file.dataFilesTab.dataAccess.download.tooltip=Download this data from your preferred mirror by running this command. +file.dataFilesTab.dataAccess.verify.tooltip=This command runs a checksum to verify the integrity of the data you have downloaded. 
+file.dataFilesTab.button.direct=Direct + +file.dataFilesTab.versions=Versions +file.dataFilesTab.versions.headers.dataset=Dataset +file.dataFilesTab.versions.headers.summary=Summary +file.dataFilesTab.versions.headers.contributors=Contributors +file.dataFilesTab.versions.headers.contributors.withheld=Contributor name(s) withheld +file.dataFilesTab.versions.headers.published=Published +file.dataFilesTab.versions.viewDiffBtn=View Differences +file.dataFilesTab.versions.citationMetadata=Citation Metadata: +file.dataFilesTab.versions.added=Added +file.dataFilesTab.versions.removed=Removed +file.dataFilesTab.versions.changed=Changed +file.dataFilesTab.versions.replaced=Replaced +file.dataFilesTab.versions.original=Original +file.dataFilesTab.versions.replacment=Replacement +file.dataFilesTab.versions.additionalCitationMetadata=Additional Citation Metadata: +file.dataFilesTab.versions.description.draft=This is a draft version. +file.dataFilesTab.versions.description.deaccessioned=Due to the previous version being deaccessioned, there are no difference notes available for this published version. +file.dataFilesTab.versions.description.firstPublished=This is the first published version. +file.dataFilesTab.versions.description.deaccessionedReason=Deaccessioned Reason: +file.dataFilesTab.versions.description.beAccessedAt=The dataset can now be accessed at: +file.dataFilesTab.versions.viewDetails.btn=View Details +file.dataFilesTab.versions.widget.viewMoreInfo=To view more information about the versions of this dataset, and to edit it if this is your dataset, please visit the full version of this dataset at the {2}. +file.dataFilesTab.versions.preloadmessage=(Loading versions...) +file.previewTab.externalTools.header=Available Previews +file.previewTab.button.label=Preview +file.previewTab.previews.not.available=Public previews are not available for this file. +file.deleteDialog.tip=Are you sure you want to delete this dataset and all of its files? You cannot undelete this dataset. +file.deleteDialog.header=Delete Dataset +file.deleteDraftDialog.tip=Are you sure you want to delete this draft version? Files will be reverted to the most recently published version. You cannot undelete this draft. +file.deleteDraftDialog.header=Delete Draft Version +file.deleteFileDialog.tip=The file(s) will be deleted after you click on the Save Changes button on the bottom of this page. +file.deleteFileDialog.immediate=The file will be deleted after you click on the Delete button. +file.deleteFileDialog.multiple.immediate=The file(s) will be deleted after you click on the Delete button. +file.deleteFileDialog.header=Delete Files +file.deleteFileDialog.failed.tip=Files will not be removed from previously published versions of the dataset. +file.deaccessionDialog.tip=Once you deaccession this dataset it will no longer be viewable by the public. +file.deaccessionDialog.version=Version +file.deaccessionDialog.reason.question1=Which version(s) do you want to deaccession? +file.deaccessionDialog.reason.question2=What is the reason for deaccession? +file.deaccessionDialog.reason.selectItem.identifiable=There is identifiable data in one or more files. +file.deaccessionDialog.reason.selectItem.beRetracted=The research article has been retracted. +file.deaccessionDialog.reason.selectItem.beTransferred=The dataset has been transferred to another repository. +file.deaccessionDialog.reason.selectItem.IRB=IRB request. +file.deaccessionDialog.reason.selectItem.legalIssue=Legal issue or Data Usage Agreement. 
+file.deaccessionDialog.reason.selectItem.notValid=Not a valid dataset. +file.deaccessionDialog.reason.selectItem.other=Other (Please type reason in space provided below) +file.deaccessionDialog.enterInfo=Please enter additional information about the reason for deaccession. +file.deaccessionDialog.leaveURL=If applicable, please leave a URL where this dataset can be accessed after deaccessioning. +file.deaccessionDialog.leaveURL.watermark=Optional dataset site, http://... +file.deaccessionDialog.deaccession.tip=Are you sure you want to deaccession? The selected version(s) will no longer be viewable by the public. +file.deaccessionDialog.deaccessionDataset.tip=Are you sure you want to deaccession this dataset? It will no longer be viewable by the public. +file.deaccessionDialog.dialog.selectVersion.error=Please select version(s) for deaccessioning. +file.deaccessionDialog.dialog.reason.error=Please select reason for deaccessioning. +file.deaccessionDialog.dialog.url.error=Please enter valid forwarding URL. +file.deaccessionDialog.dialog.textForReason.error=Please enter text for reason for deaccessioning. +file.deaccessionDialog.dialog.limitChar.error=Text for reason for deaccessioning may be no longer than {0} characters. +file.viewDiffDialog.header=Version Differences Details +file.viewDiffDialog.dialog.warning=Please select two versions to view the differences. +file.viewDiffDialog.notAvailable=N/A +file.viewDiffDialog.version=Version +file.viewDiffDialog.lastUpdated=Last Updated +file.viewDiffDialog.fileID=File ID +file.viewDiffDialog.fileName=Name +file.viewDiffDialog.fileType=Type +file.viewDiffDialog.fileSize=Size +file.viewDiffDialog.category=Tags +file.viewDiffDialog.description=Description +file.viewDiffDialog.provDescription=Provenance Description +file.viewDiffDialog.fileReplaced=File Replaced +file.viewDiffDialog.filesReplaced=File(s) Replaced +file.viewDiffDialog.files.header=Files +file.viewDiffDialog.msg.draftFound= This is the "DRAFT" version. +file.viewDiffDialog.msg.draftNotFound=The "DRAFT" version was not found. +file.viewDiffDialog.msg.versionFound= This is version "{0}". +file.viewDiffDialog.msg.versionNotFound=Version "{0}" was not found. +file.metadataTip=Metadata Tip: After adding the dataset, click the Edit Dataset button to add more metadata. +file.addBtn=Save Dataset +file.dataset.allFiles=All Files from this Dataset +file.downloadDialog.header=Dataset Terms +file.downloadDialog.tip=Please confirm and/or complete the information needed below in order to continue. +file.requestAccessTermsDialog.tip=Please confirm and/or complete the information needed below in order to request access to files in this dataset. +file.requestAccess.notAllowed=Requests for access are not accepted on the Dataset. +file.requestAccess.notAllowed.alreadyHasDownloadPermisssion=User already has permission to download this file. Request Access is invalid. + +file.search.placeholder=Search this dataset... +file.results.filter=Filter by +file.results.filter.type=File Type: +file.results.filter.access=Access: +file.results.filter.tag=File Tag: +file.results.filter.all=All +file.results.btn.sort=Sort +file.results.btn.sort.option.nameAZ=Name (A-Z) +file.results.btn.sort.option.nameZA=Name (Z-A) +file.results.btn.sort.option.newest=Newest +file.results.btn.sort.option.oldest=Oldest +file.results.btn.sort.option.size=Size +file.results.btn.sort.option.type=Type +file.compute.fileAccessDenied=This file is restricted and you may not compute on it because you have not been granted access. 
+file.configure.Button=Configure + +file.auxfiles.download.header=Download Auxiliary Files +# These types correspond to the AuxiliaryFile.Type enum. +file.auxfiles.types.DP=Differentially Private Statistics +# Add more types here +file.auxfiles.unspecifiedTypes=Other Auxiliary Files + +# dataset-widgets.xhtml +dataset.widgets.title=Dataset Thumbnail + Widgets +dataset.widgets.notPublished.why.header=Why Use Widgets? +dataset.widgets.notPublished.why.reason1=Increases the web visibility of your data by allowing you to embed your dataverse and datasets into your personal or project website. +dataset.widgets.notPublished.why.reason2=Allows others to browse your dataverse and datasets without leaving your personal or project website. +dataset.widgets.notPublished.how.header=How To Use Widgets +dataset.widgets.notPublished.how.tip1=To use widgets, your dataverse and datasets need to be published. +dataset.widgets.notPublished.how.tip2=After publishing, code will be available on this page for you to copy and add to your personal or project website. +dataset.widgets.notPublished.how.tip3=Do you have an OpenScholar website? If so, learn more about adding the Dataverse widgets to your website here. +dataset.widgets.notPublished.getStarted=To get started, publish your dataset. To learn more about Widgets, visit the Widgets section of the User Guide. +dataset.widgets.editAdvanced=Edit Advanced Options +dataset.widgets.editAdvanced.tip=Advanced Options – Additional options for configuring your widget on your personal or project website. +dataset.widgets.tip=Copy and paste this code into the HTML on your site. To learn more about Widgets, visit the Widgets section of the User Guide. +dataset.widgets.citation.txt=Dataset Citation +dataset.widgets.citation.tip=Add a citation for your dataset to your personal or project website. +dataset.widgets.datasetFull.txt=Dataset +dataset.widgets.datasetFull.tip=Add a way for visitors on your website to be able to view your datasets, download files, etc. +dataset.widgets.advanced.popup.header=Widget Advanced Options +dataset.widgets.advanced.prompt=Forward persistent URL's in your dataset citation to your personal website. +dataset.widgets.advanced.url.label=Personal Website URL +dataset.widgets.advanced.url.watermark=http://www.example.com/page-name +dataset.widgets.advanced.invalid.message=Please enter a valid URL +dataset.widgets.advanced.success.message=Successfully updated your Personal Website URL +dataset.widgets.advanced.failure.message=The dataverse Personal Website URL has not been updated. +dataset.thumbnailsAndWidget.breadcrumbs.title=Thumbnail + Widgets +dataset.thumbnailsAndWidget.thumbnails.title=Thumbnail +dataset.thumbnailsAndWidget.widgets.title=Widgets +dataset.thumbnailsAndWidget.thumbnailImage=Thumbnail Image +dataset.thumbnailsAndWidget.thumbnailImage.title=The logo or image file you wish to display as the thumbnail of this dataset. +dataset.thumbnailsAndWidget.thumbnailImage.tip=Supported image types are JPG, TIF, or PNG and should be no larger than {0} KB. The maximum display size for an image file as a dataset thumbnail is 48 pixels wide by 48 pixels high. +dataset.thumbnailsAndWidget.thumbnailImage.default=Default Icon +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable=Select Available File +dataset.thumbnailsAndWidget.thumbnailImage.selectThumbnail=Select Thumbnail +dataset.thumbnailsAndWidget.thumbnailImage.selectAvailable.title=Select a thumbnail from those available as image data files that belong to your dataset. 
+dataset.thumbnailsAndWidget.thumbnailImage.uploadNew=Upload New File +dataset.thumbnailsAndWidget.thumbnailImage.uploadNew.title=Upload an image file as your dataset thumbnail, which will be stored separately from the data files that belong to your dataset. +dataset.thumbnailsAndWidget.thumbnailImage.upload=Upload Image +dataset.thumbnailsAndWidget.thumbnailImage.upload.invalidMsg=The image could not be uploaded. Please try again with a JPG, TIF, or PNG file. +dataset.thumbnailsAndWidget.thumbnailImage.alt=Thumbnail image selected for dataset +dataset.thumbnailsAndWidget.success=Dataset thumbnail updated. +dataset.thumbnailsAndWidget.removeThumbnail=Remove Thumbnail +dataset.thumbnailsAndWidget.removeThumbnail.tip=You are only removing this image as the dataset thumbnail, not removing it from your dataset. To do that, go to the Edit Files page. +dataset.thumbnailsAndWidget.availableThumbnails=Available Thumbnails +dataset.thumbnailsAndWidget.availableThumbnails.tip=Select a thumbnail from the data files that belong to your dataset. Continue back to the Thumbnail + Widgets page to save your changes. + +# file.xhtml +file.share.fileShare=Share File +file.share.fileShare.tip=Share this file on your favorite social media networks. +file.share.fileShare.shareText=View this file. +file.title.label=Title +file.citation.label=Citation +file.citation.notice=This file is part of "{0}". +file.citation.dataset=Dataset Citation +file.citation.datafile=File Citation +file.cite.downloadBtn=Cite Dataset +file.cite.file.downloadBtn=Cite Data File +file.pid.label=File Persistent ID: +file.unf.lable= File UNF: +file.general.metadata.label=General Metadata +file.description.label=Description +file.tags.label=Tags +file.lastupdated.label=Last Updated +file.DatasetVersion=Version + +file.previewTab.tool.open=Open +file.previewTab.header=Preview +file.previewTab.presentation=File Preview Tool +file.previewTab.openBtn=Open in New Window +file.previewTab.exploreBtn={0} on {1} +file.metadataTab.fileMetadata.header=File Metadata +file.metadataTab.fileMetadata.persistentid.label=Data File Persistent ID +file.metadataTab.fileMetadata.downloadUrl.label=Download URL +file.metadataTab.fileMetadata.downloadUrl.info=Use the Download URL in a Wget command or a download manager to avoid interrupted downloads, time outs or other failures. User Guide - Downloading via URL +file.metadataTab.fileMetadata.unf.label=File UNF +file.metadataTab.fileMetadata.size.label=Size +file.metadataTab.fileMetadata.type.label=Type +file.metadataTab.fileMetadata.description.label=Description +file.metadataTab.fileMetadata.publicationDate.label=Publication Date +file.metadataTab.fileMetadata.depositDate.label=Deposit Date +file.metadataTab.fileMetadata.hierarchy.label=File Path +file.metadataTab.fileMetadata.hierarchy.tip=Hierarchical directory structure path used to display file organization and support reproducibility. 
+file.metadataTab.fitsMetadata.header=FITS Metadata + +file.versionDifferences.noChanges=No changes associated with this version +file.versionDifferences.fileNotInVersion=File not included in this version +file.versionDifferences.actionChanged=Changed +file.versionDifferences.actionAdded=Added +file.versionDifferences.actionRemoved=Removed +file.versionDifferences.actionReplaced=Replaced +file.versionDifferences.fileMetadataGroupTitle=File Metadata +file.versionDifferences.fileTagsGroupTitle=File Tags +file.versionDifferences.descriptionDetailTitle=Description +file.versionDifferences.provenanceDetailTitle=Provenance +file.versionDifferences.fileNameDetailTitle=File Name +file.versionDifferences.fileAccessTitle=File Access +file.versionDifferences.fileRestricted=Restricted +file.versionDifferences.fileUnrestricted=Unrestricted +file.versionDifferences.fileGroupTitle=File + +file.anonymized.authorsWithheld=Author name(s) withheld +# File Ingest +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. + +# editdatafile.xhtml + +# editFilesFragment.xhtml +file.edit.error.file_exceeds_limit=This file exceeds the size limit. +# File metadata error +file.metadata.datafiletag.not_tabular=You cannot add Tabular Data Tags to a non-tabular file. +file.metadata.filedirectory.invalidCharacters=Directory Name cannot contain invalid characters. Valid characters are a-Z, 0-9, '_', '-', '.', '\\', '/' and ' ' (white space). + +# File Edit Success +file.message.editSuccess=The file has been updated. +file.message.deleteSuccess=The file has been deleted. +file.message.replaceSuccess=The file has been replaced. + +# File Add/Replace operation messages +file.addreplace.file_size_ok=File size is in range. +file.addreplace.error.byte_abrev=B +file.addreplace.error.file_exceeds_limit=This file size ({0}) exceeds the size limit of {1}. +file.addreplace.error.dataset_is_null=The dataset cannot be null. +file.addreplace.error.dataset_id_is_null=The dataset ID cannot be null. +file.addreplace.error.parsing=Error in parsing provided json +file.addreplace.warning.unzip.failed=Failed to unzip the file. Saving the file as is. +file.addreplace.warning.unzip.failed.size=A file contained in this zip file exceeds the size limit of {0}. This Dataverse installation will save and display the zipped file, rather than unpacking and displaying files. +find.dataset.error.dataset_id_is_null=When accessing a dataset based on Persistent ID, a {0} query parameter must be present. +find.dataset.error.dataset.not.found.persistentId=Dataset with Persistent ID {0} not found. +find.dataset.error.dataset.not.found.id=Dataset with ID {0} not found. +find.dataset.error.dataset.not.found.bad.id=Bad dataset ID number: {0}. +find.datasetlinking.error.not.found.ids=Dataset linking dataverse with dataset ID {0} and dataset linking dataverse ID {1} not found. +find.datasetlinking.error.not.found.bad.ids=Bad dataset ID number: {0} or dataset linking dataverse ID number: {1}. +find.dataverselinking.error.not.found.ids=Dataverse linking dataverse with dataverse ID {0} and dataverse linking dataverse ID {1} not found. +find.dataverselinking.error.not.found.bad.ids=Bad dataverse ID number: {0} or dataverse linking dataverse ID number: {1}. 
+find.datafile.error.datafile.not.found.id=File with ID {0} not found. +find.datafile.error.datafile.not.found.bad.id=Bad file ID number: {0}. +find.datafile.error.dataset.not.found.persistentId=Datafile with Persistent ID {0} not found. +find.dataverse.role.error.role.not.found.id=Dataverse Role with ID {0} not found. +find.dataverse.role.error.role.not.found.bad.id=Bad Dataverse Role ID number: {0} +find.dataverse.role.error.role.not.found.alias=Dataverse Role with alias {0} not found. +find.dataverse.role.error.role.builtin.not.allowed=May not delete Built In Role {0}. +file.addreplace.error.dataset_id_not_found=There was no dataset found for ID: +file.addreplace.error.no_edit_dataset_permission=You do not have permission to edit this dataset. +file.addreplace.error.filename_undetermined=The file name cannot be determined. +file.addreplace.error.file_content_type_undetermined=The file content type cannot be determined. +file.addreplace.error.file_upload_failed=The file upload failed. +file.addreplace.warning.duplicate_file=This file has the same content as {0} that is in the dataset. +file.addreplace.error.duplicate_file.continue=You may delete if it was not intentional. +file.addreplace.error.existing_file_to_replace_id_is_null=The ID of the existing file to replace must be provided. +file.addreplace.error.existing_file_to_replace_not_found_by_id=Replacement file not found. There was no file found for ID: {0} +file.addreplace.error.existing_file_to_replace_is_null=The file to replace cannot be null. +file.addreplace.error.existing_file_to_replace_not_in_dataset=The file to replace does not belong to this dataset. +file.addreplace.error.existing_file_not_in_latest_published_version=You cannot replace a file that is not in the most recently published dataset. (The file is unpublished or was deleted from a previous version.) +file.addreplace.content_type.header=File Type Different +file.addreplace.already_exists.header=Duplicate File Uploaded +file.addreplace.already_exists.header.multiple=Duplicate Files Uploaded +file.addreplace.error.replace.new_file_has_different_content_type=The original file ({0}) and replacement file ({1}) are different file types. +file.addreplace.error.replace.new_file_same_as_replacement=Error! You may not replace a file with a file that has duplicate content. +file.addreplace.error.unpublished_file_cannot_be_replaced=You cannot replace an unpublished file. Please delete it instead of replacing it. +file.addreplace.error.ingest_create_file_err=There was an error when trying to add the new file. +file.addreplace.error.initial_file_list_empty=An error occurred and the new file was not added. +file.addreplace.error.initial_file_list_more_than_one=You cannot replace a single file with multiple files. The file you uploaded was ingested into multiple files. +file.addreplace.error.final_file_list_empty=There are no files to add. (This error should not happen if steps called in sequence.) +file.addreplace.error.only_replace_operation=This should only be called for file replace operations! +file.addreplace.error.failed_to_remove_old_file_from_dataset=Unable to remove old file from new DatasetVersion. +file.addreplace.error.add.add_file_error=Failed to add file to dataset. +file.addreplace.error.phase2_called_early_no_new_files=There was an error saving the dataset - no new files found. +file.addreplace.success.add=File successfully added! +file.addreplace.success.replace=File successfully replaced! +file.addreplace.error.auth=The API key is invalid. 
+file.addreplace.error.invalid_datafile_tag=Not a valid Tabular Data Tag: + + + +# 500.xhtml +error.500.page.title=500 Internal Server Error +error.500.message=Internal Server Error - An unexpected error was encountered, no more information is available. + +# 404.xhtml +error.404.page.title=404 Not Found +error.404.message=Page Not Found - The page you are looking for was not found. + +# 403.xhtml +error.403.page.title=403 Not Authorized +error.403.message=Not Authorized - You are not authorized to view this page. + +# general error - support message +error.support.message= If you believe this is an error, please contact {0} for assistance. + +# citation-frame.xhtml +citationFrame.banner.message=If the site below does not load, the archived data can be found in the {0} {1}. {2} +citationFrame.banner.message.here=here +citationFrame.banner.closeIcon=Close this message, go to dataset +citationFrame.banner.countdownMessage= This message will close in +citationFrame.banner.countdownMessage.seconds=seconds + +# Friendly AuthenticationProvider names +authenticationProvider.name.builtin=Dataverse +authenticationProvider.name.null=(provider is unknown) +authenticationProvider.name.github=GitHub +authenticationProvider.name.google=Google +authenticationProvider.name.orcid=ORCiD +authenticationProvider.name.orcid-sandbox=ORCiD Sandbox +authenticationProvider.name.shib=Shibboleth +ingest.csv.invalidHeader=Invalid header row. One of the cells is empty. +ingest.csv.lineMismatch=Mismatch between line counts in first and final passes!, {0} found on first pass, but {1} found on second. +ingest.csv.recordMismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +ingest.csv.nullStream=Stream can't be null. +citationFrame.banner.countdownMessage.seconds=seconds + +#file-edit-popup-fragment.xhtml #editFilesFragment.xhtml +dataset.access.accessHeader=Restrict Files and Add Dataset Terms of Access +dataset.access.description=Restricting limits access to published files. You can add or edit Terms of Access for the dataset, and allow people to Request Access to restricted files. + +#datasetFieldForEditFragment.xhtml +dataset.AddReplication=Add "Replication Data for" to Title +dataset.replicationDataFor=Replication Data for: + + +#mydata_fragment.xhtml +mydataFragment.infoAccess=Here are all the dataverses, datasets, and files you have access to. You can filter through them by publication status and roles. +mydataFragment.moreResults=View More Results +mydataFragment.publicationStatus=Publication Status +mydataFragment.roles=Roles +mydataFragment.resultsByUserName=Results by Username +mydataFragment.search=Search my data... +mydata.result=Result +mydata.results=Results +mydata.viewnext=View Next +mydata.more=More + +file.provenance=Provenance +file.editProvenanceDialog=Provenance +file.editProvenanceDialog.tip=Provenance is a record of the origin of your data file and any transformations it has been through. Upload a JSON file from a provenance capture tool to generate a graph of your data''s provenance. For more information, please refer to our User Guide. +file.editProvenanceDialog.uploadSuccess=Upload complete +file.editProvenanceDialog.uploadError=An error occurred during upload and parsing of your provenance file. +file.editProvenanceDialog.noEntitiesError=The uploaded provenance file does not contain any entities that can be related to your Data File. 
+file.editProvenanceDialog.invalidSchemaError=The uploaded provenance file does not comply with the W3C Provenance standard. +file.editProvenanceDialog.bundleFile=Provenance File +file.editProvenanceDialog.bundleFile.instructions=File must be JSON format and follow the W3C standard. +file.editProvenanceDialog.bundleFile.alreadyPublished=This Provenance File has been published and cannot be replaced or removed. +file.editProvenanceDialog.bundleEntity=Data File Entity +file.editProvenanceDialog.bundleEntity.placeholder=Connect entity... +file.editProvenanceDialog.bundleEntity.requiredValidation=Value is required. +file.editProvenanceDialog.bundleEntity.tip=Select the entity in your provenance file which represents your data file. +file.editProvenanceDialog.bundleEntity.nameHeader=Name +file.editProvenanceDialog.bundleEntity.typeHeader=Type +file.editProvenanceDialog.bundleEntity.entityHeader=Entity +file.editProvenanceDialog.selectToAddBtn=Select File +file.editProvenanceDialog.description.tip=You may also add information documenting the history of your data file, including how it was created, how it has changed, and who has worked with it. +file.editProvenanceDialog.description=Provenance Description +file.editProvenanceDialog.description.placeholder=Add provenance description... +file.confirmProvenanceDialog=Provenance +file.confirmProvenanceDialog.tip1=Once you publish this dataset, your provenance file can not be edited or replaced. +file.confirmProvenanceDialog.tip2=Select "Cancel" to return the previous page, where you can preview your provenance file to confirm it is correct. +file.metadataTab.provenance.header=File Provenance +file.metadataTab.provenance.body=File Provenance information coming in a later release... +file.metadataTab.provenance.error=Due to an internal error, your provenance information was not correctly saved. +file.metadataTab.provenance.message=Your provenance information has been received. Please click Save Changes below to ensure all data is added to your dataset. + +file.provConfirm.unpublished.json=Your Provenance File will become permanent upon publishing your dataset. Please preview to confirm before publishing. +file.provConfirm.published.json=Your Provenance File will become permanent once you click Save Changes. Please preview to confirm before you Save Changes. +file.provConfirm.freeform=Your Provenance Description is not permanent; it can be updated at any time. +file.provConfirm.empty=No changes have been made. + +file.provAlert.published.json=Your Provenance File changes have been saved to the Dataset. +file.provAlert.unpublished.json=Your Provenance File changes will be saved to this version of the Dataset once you click on the Save Changes button. +file.provAlert.freeform=Your Provenance Description changes will be saved to this version of the Dataset once you click on the Save Changes button. +file.provAlert.filePage.published.json=Your Provenance File changes have been saved to the Dataset. +file.provAlert.filePage.unpublished.json=Your Provenance File changes have been saved to this version of the Dataset. +file.provAlert.filePage.freeform=Your Provenance Description changes have been saved to this version of the Dataset. + +api.prov.provJsonSaved=PROV-JSON provenance data saved for Data File: +api.prov.provJsonDeleted=PROV-JSON deleted for the selected Data File. + +api.prov.error.provDisabled=This functionality has been administratively disabled. +api.prov.error.badDataFileId=Invalid DataFile ID. 
+api.prov.error.jsonUpdateNotAllowed=PROV-JSON cannot be updated for a published file that already has PROV-JSON. +api.prov.error.entityMismatch=Entity name provided does not match any entities parsed from the uploaded PROV-JSON. +api.prov.error.jsonDeleteNotAllowed=PROV-JSON cannot be deleted for a published file. +api.prov.error.jsonNoContent=No provenance json available for this file. +api.prov.error.freeformInvalidJson=A valid JSON object could not be found. +api.prov.error.freeformMissingJsonKey=The JSON object you send must have a key called 'text'. +api.prov.error.freeformNoText=No provenance free form text available for this file. +api.prov.error.noDataFileFound=Could not find a file based on ID. + +bagit.sourceOrganization=Dataverse Installation () +bagit.sourceOrganizationAddress= +bagit.sourceOrganizationEmail= + +#Permission.java +permission.addDataverseDataverse=Add a dataverse within another dataverse +permission.deleteDataset=Delete a dataset draft +permission.deleteDataverse=Delete an unpublished dataverse +permission.publishDataset=Publish a dataset +permission.publishDataverse=Publish a dataverse +permission.managePermissionsDataset=Manage permissions for a dataset +permission.managePermissionsDataverse=Manage permissions for a dataverse +permission.editDataset=Edit a dataset's metadata +permission.editDataverse=Edit a dataverse's metadata, facets, customization, and templates +permission.downloadFile=Download a file +permission.viewUnpublishedDataset=View an unpublished dataset and its files +permission.viewUnpublishedDataverse=View an unpublished dataverse +permission.addDatasetDataverse=Add a dataset to a dataverse + +#DataverseUserPage.java +userPage.informationUpdated=Your account information has been successfully updated. +userPage.passwordChanged=Your account password has been successfully changed. +confirmEmail.changed=Your email address has changed and must be re-verified. Please check your inbox at {0} and follow the link we''ve sent. \n\nAlso, please note that the link will only work for the next {1} before it has expired. + +#Dataset.java +dataset.category.documentation=Documentation +dataset.category.data=Data +dataset.category.code=Code + +#DatasetVersionDifference.java +dataset.version.file.added=Files (Added: {0} +dataset.version.file.removed=Files (Removed: {0} +dataset.version.file.removed2=; Removed: {0} +dataset.version.file.replaced=Files (Replaced: {0} +dataset.version.file.replaced2=; Replaced: {0} +dataset.version.file.changed=Files (Changed File Metadata: {0} +dataset.version.file.changed2=; Changed File Metadata: {0} +dataset.version.variablemetadata.changed=Variable Metadata (Changed Variable Metadata: {0} +dataset.version.variablemetadata.changed2=; Changed Variable Metadata: {0} + +#DataversePage.java +dataverse.item.required=Required +dataverse.item.required.conditional=Conditionally Required +dataverse.item.optional=Optional +dataverse.item.hidden=Hidden +dataverse.edit.msg=Edit Dataverse +dataverse.edit.detailmsg=Edit your dataverse and click Save Changes. Asterisks indicate required fields. +dataverse.feature.update=The featured dataverses for this dataverse have been updated. +dataverse.link.select=You must select a linking dataverse. +dataset.noSelectedDataverse.header=Select Dataverse(s) +dataverse.link.user=Only authenticated users can link a dataverse. +dataverse.link.error=Unable to link {0} to {1}. An internal error occurred. +dataverse.search.user=Only authenticated users can save a search. 
+dataverse.alias=alias +dataverse.alias.taken=This Alias is already taken. + +#editDatafilesPage.java +dataset.save.fail=Dataset Save Failed + +dataset.files.exist=Files {0} have the same content as {1} that already exists in the dataset. +dataset.file.exist=File {0} has the same content as {1} that already exists in the dataset. +dataset.file.exist.test={0, choice, 1#File |2#Files |} {1} {0, choice, 1#has |2#have |} the same content as {2} that already {0, choice, 1#exist |2#exist |}in the dataset. +dataset.files.duplicate=Files {0} have the same content as {1} that have already been uploaded. +dataset.file.duplicate=File {0} has the same content as {1} that has already been uploaded. +dataset.file.inline.message= This file has the same content as {0}. +dataset.file.upload=Successful {0} is uploaded. +dataset.file.upload.setUp.rsync.failed=Rsync upload setup failed! +dataset.file.upload.setUp.rsync.failed.detail=Unable to find appropriate storage driver. +dataset.file.uploadFailure=upload failure +dataset.file.uploadFailure.detailmsg=the file {0} failed to upload! +dataset.file.uploadWarning=upload warning +dataset.file.uploadWorked=upload worked +dataset.file.upload.popup.explanation.tip=For more information, please refer to the Duplicate Files section of the User Guide. + +#EmailValidator.java +email.invalid=is not a valid email address. + +#URLValidator.java +url.invalid=is not a valid URL. + +#HarvestingClientsPage.java +harvest.start.error=Sorry, harvest could not be started for the selected harvesting client configuration (unknown server error). +harvest.delete.error=Selected harvesting client cannot be deleted; unknown exception: +harvest.create.error=Failed to create a new Harvesting Client configuration: no destination dataverse selected. +harvest.createCommand.error=Harvesting client creation command failed +harvest.create.fail=Harvesting client creation failed (reason unknown). +harvest.update.success=Successfully updated harvesting client +harvest.save.failure1=Failed to save harvesting client +harvest.save.failure2=Failed to save harvesting client (reason unknown). + +#HarvestingSetsPage.java +harvest.oaicreate.fail=Failed to create OAI set +harvest.oaicreate.defaultset.fail=Failed to create the default OAI set +harvest.oaiupdate.fail=Failed to update OAI set. +harvest.oaiupdate.success=Successfully updated OAI set "{0}". +harvest.delete.fail=Failed to delete harvesting set; unknown exception: +harvest.reexport.fail=Sorry, could not start re-export on selected OAI set (unknown server error). +harvest.search.failed=Search failed for the query provided. Message from the Dataverse search server: + +#LoginPage.java +login.Username/Email=Please enter a Username +login.Password=Please enter a Password + +#SystemConfig.java +system.app.terms=There are no Terms of Use for this Dataverse installation. +system.api.terms=There are no API Terms of Use for this Dataverse installation. + +#DatasetPage.java +dataverse.notreleased=DataverseNotReleased +dataverse.release.authenticatedUsersOnly=Only authenticated users can release a dataverse. +dataset.registration.failed=Dataset Registration Failed +dataset.registered=DatasetRegistered +dataset.registered.msg=Your dataset is now registered. 
+dataset.notlinked=DatasetNotLinked +dataset.notlinked.msg=There was a problem linking this dataset to yours: +datasetversion.archive.success=Archival copy of Version successfully submitted +datasetversion.archive.failure=Error in submitting an archival copy +datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. +datasetversion.update.archive.failure=Dataset Version Update succeeded, but the attempt to update the archival copy failed. +datasetversion.update.success=The published version of your Dataset has been updated. +datasetversion.update.archive.success=The published version of your Dataset, and its archival copy, have been updated. + +#ThemeWidgetFragment.java +theme.validateTagline=Tagline must be at most 140 characters. +theme.urlValidate=URL validation failed. +theme.urlValidate.msg=Please provide URL. +dataverse.save.failed=Dataverse Save Failed - + +#LinkValidator.java +link.tagline.validate=Please enter a tagline for the website to be hyperlinked with. + +#TemplatePage.java +template.save.fail=Template Save Failed +template.create=Template has been created. +template.save=Template has been edited and saved. + +#GuestbookPage.java +guestbook.save.fail=Guestbook Save Failed +guestbook.option.msg= - An Option question requires multiple options. Please complete before saving. +guestbook.create=The guestbook has been created. +guestbook.save=The guestbook has been edited and saved. + +#Shib.java +shib.invalidEmailAddress=The SAML assertion contained an invalid email address: "{0}". +shib.emailAddress.error=A single valid address could not be found. +shib.nullerror=The SAML assertion for "{0}" was null. Please contact support. +dataverse.shib.success=Your Dataverse account is now associated with your institutional account. +shib.convert.fail.deactivated=Your existing account cannot be converted because it has been deactivated. +shib.createUser.fail=Couldn't create user. +shib.duplicate.email.error=Cannot login, because the e-mail address associated with it has changed since previous login and is already in use by another account. + +#IngestServiceBean.java +ingest.failed=ingest failed + +#ManagePermissionsPage.java +permission.roleWasRemoved={0} role for {1} was removed. +permission.defaultPermissionDataverseUpdated=The default permissions for this dataverse have been updated. +permission.roleAssignedToFor={0} role assigned to {1} for {2}. +permission.roleNotAssignedFor={0} role could NOT be assigned to {1} for {2}. +permission.updated=updated +permission.created=created +permission.roleWas=The role was {0}. To assign it to a user and/or group, click on the Assign Roles to Users/Groups button in the Users/Groups section of this page. +permission.roleNotSaved=The role was not able to be saved. +permission.permissionsMissing=Permissions {0} missing. +permission.CannotAssigntDefaultPermissions=Cannot assign default permissions. +permission.default.contributor.role.none.decription=A person who has no permissions on a newly created dataset. Not recommended for dataverses with human contributors. +permission.default.contributor.role.none.name=None +permission.role.must.be.created.by.superuser=Roles can only be created or edited by superusers. +permission.role.not.created.alias.already.exists=Role with this alias already exists. + +#ManageFilePermissionsPage.java +permission.roleNotAbleToBeRemoved=The role assignment was not able to be removed. +permission.fileAccessGranted=File Access request by {0} was granted. 
+permission.fileAccessRejected=File Access request by {0} was rejected. +permission.roleNotAbleToBeAssigned=The role was not able to be assigned. + +#ManageGroupsPage.java +dataverse.manageGroups.create.success=Successfully created group {0}. Refresh to update your page. +dataverse.manageGroups.save.success=Successfully saved group {0}. +dataverse.manageGroups.delete=The group has been deleted. +dataverse.manageGroups.nodelete=The explicit group cannot be deleted. +dataverse.manageGroups.create.fail=Group Creation failed. +dataverse.manageGroups.edit.fail=Group edit failed. +dataverse.manageGroups.save.fail=Group Save failed. + +#ManageTemplatesPage.java +template.makeDefault=The template has been selected as the default template for this dataverse +template.unselectDefault=The template has been removed as the default template for this dataverse +template.clone=The template has been copied +template.clone.error=Template could not be copied. +template.delete=The template has been deleted +template.delete.error=The dataset template cannot be deleted. +template.update=Template data updated +template.update.error=Template update failed +template.makeDefault.error=The dataset template cannot be made default. +page.copy=Copy of + +#RolePermissionFragment.java +permission.roleAssignedToOn=Role {0} assigned to {1} on {2} +permission.cannotAssignRole=Can''t assign role: {0} +permission.roleRevoked=Role assignment revoked successfully +permission.cannotRevokeRole1=Cannot revoke role assignment - you''re missing permission {0} +permission.cannotRevokeRole2=Cannot revoke role assignment: {0} +permission.roleSave=Role "{0}" saved +permission.cannotSaveRole=Cannot save role {0} + +#GlobalId.java +pid.allowedCharacters=^[A-Za-z0-9._/:\\-]* + +#General Command Exception +command.exception.only.superusers={1} can only be called by superusers. +command.exception.user.deactivated={0} failed: User account has been deactivated. +command.exception.user.deleted={0} failed: User account has been deleted. + +#Admin-API +admin.api.auth.mustBeSuperUser=Forbidden. You must be a superuser. +admin.api.migrateHDL.failure.must.be.set.for.doi=May not migrate while installation protocol set to "hdl". Protocol must be "doi" +admin.api.migrateHDL.failure.must.be.hdl.dataset=Dataset was not registered as a HDL. It cannot be migrated. +admin.api.migrateHDL.success=Dataset migrate HDL registration complete. Dataset re-registered successfully. +admin.api.migrateHDL.failure=Failed to migrate Dataset Handle id: {0} +admin.api.migrateHDL.failureWithException=Failed to migrate Dataset Handle id: {0} Unexpected exception: {1} +admin.api.deleteUser.failure.prefix=Could not delete Authenticated User {0} because +admin.api.deleteUser.failure.dvobjects= the user has created Dataverse object(s) +admin.api.deleteUser.failure.gbResps= the user is associated with file download (Guestbook Response) record(s) +admin.api.deleteUser.failure.roleAssignments=the user is associated with role assignment record(s) +admin.api.deleteUser.failure.versionUser=the user has contributed to dataset version(s) +admin.api.deleteUser.failure.savedSearches=the user has created saved searches +admin.api.deleteUser.success=Authenticated User {0} deleted. + +#Files.java +files.api.metadata.update.duplicateFile=Filename already exists at {0} + +#Datasets.java +datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. +datasets.api.updatePIDMetadata.auth.mustBeSuperUser=Forbidden. 
You must be a superuser. +datasets.api.updatePIDMetadata.success.for.single.dataset=Dataset {0} PID Metadata updated successfully. +datasets.api.updatePIDMetadata.success.for.update.all=All Dataset PID Metadata update completed successfully. +datasets.api.moveDataset.error.targetDataverseNotFound=Target dataverse not found. +datasets.api.moveDataset.error.suggestForce=Use the query parameter forceMove=true to complete the move. +datasets.api.moveDataset.success=Dataset moved successfully. +datasets.api.listing.error=Fatal error trying to list the contents of the dataset. Please report this error to the Dataverse administrator. +datasets.api.datasize.storage=Total size of the files stored in this dataset: {0} bytes +datasets.api.datasize.download=Total size of the files available for download in this version of the dataset: {0} bytes +datasets.api.datasize.ioerror=Fatal IO error while trying to determine the total size of the files stored in the dataset. Please report this error to the Dataverse administrator. +datasets.api.grant.role.not.found.error=Cannot find role named ''{0}'' in dataverse {1} +datasets.api.grant.role.cant.create.assignment.error=Cannot create assignment: {0} +datasets.api.grant.role.assignee.not.found.error=Assignee not found +datasets.api.revoke.role.not.found.error="Role assignment {0} not found" +datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2} +datasets.api.privateurl.error.datasetnotfound=Could not find dataset. +datasets.api.privateurl.error.alreadyexists=Private URL already exists for this dataset. +datasets.api.privateurl.error.notdraft=Can't create Private URL because the latest version of this dataset is not a draft. +datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymized access because this dataset has been published. + + +#Dataverses.java +dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found. +dataverses.api.update.default.contributor.role.success=Default contributor role for Dataverse {0} has been set to {1}. +dataverses.api.update.default.contributor.role.failure.role.does.not.have.dataset.permissions=Role {0} does not have dataset permissions. +dataverses.api.move.dataverse.failure.descendent=Can't move a dataverse to its descendant +dataverses.api.move.dataverse.failure.already.member=Dataverse already in this dataverse +dataverses.api.move.dataverse.failure.itself=Cannot move a dataverse into itself +dataverses.api.move.dataverse.failure.not.published=Published dataverse may not be moved to unpublished dataverse. You may publish {1} and re-try the move. +dataverses.api.move.dataverse.error.guestbook=Dataset guestbook is not in target dataverse. +dataverses.api.move.dataverse.error.template=Dataverse template is not in target dataverse. +dataverses.api.move.dataverse.error.featured=Dataverse is featured in current dataverse. +dataverses.api.move.dataverse.error.metadataBlock=Dataverse metadata block is not in target dataverse. +dataverses.api.move.dataverse.error.dataverseLink=Dataverse is linked to target dataverse or one of its parents. +dataverses.api.move.dataverse.error.datasetLink=Dataset is linked to target dataverse or one of its parents. +dataverses.api.move.dataverse.error.forceMove=Please use the parameter ?forceMove=true to complete the move. This will remove anything from the dataverse that is not compatible with the target dataverse. 
+ +#Access.java +access.api.allowRequests.failure.noDataset=Could not find Dataset with id: {0} +access.api.allowRequests.failure.noSave=Problem saving dataset {0}: {1} +access.api.allowRequests.allows=allows +access.api.allowRequests.disallows=disallows +access.api.allowRequests.success=Dataset {0} {1} file access requests. +access.api.fileAccess.failure.noUser=Could not find user to execute command: {0} +access.api.requestAccess.failure.commandError=Problem trying request access on {0} : {1} +access.api.requestAccess.failure.requestExists=An access request for this file on your behalf already exists. +access.api.requestAccess.failure.invalidRequest=You may not request access to this file. It may already be available to you. +access.api.requestAccess.noKey=You must provide a key to request access to a file. +access.api.requestAccess.fileNotFound=Could not find datafile with id {0}. +access.api.requestAccess.invalidRequest=This file is already available to you for download or you have a pending request +access.api.requestAccess.requestsNotAccepted=Requests for access are not accepted on the Dataset. +access.api.requestAccess.success.for.single.file=Access to File {0} requested. +access.api.rejectAccess.failure.noPermissions=Requestor does not have permission to manage file download requests. +access.api.grantAccess.success.for.single.file=Access to File {0} granted. +access.api.grantAccess.noAssigneeFound=Could not find assignee with identifier {0}. +access.api.grantAccess.failure.commandError=Problem trying grant access on {0} : {1} +access.api.fileAccess.rejectFailure.noRequest=No request for access to file {0} for user {1} +access.api.rejectAccess.success.for.single.file=Access to File {0} rejected. +access.api.revokeAccess.noRoleFound=No File Downloader role found for user {0} +access.api.revokeAccess.success.for.single.file=File Downloader access has been revoked for user {0} on file {1} +access.api.requestList.fileNotFound=Could not find datafile with id {0}. +access.api.requestList.noKey=You must provide a key to get list of access requests for a file. +access.api.requestList.noRequestsFound=There are no access requests for this file {0}. +access.api.exception.metadata.not.available.for.nontabular.file=This type of metadata is only available for tabular files. +access.api.exception.metadata.restricted.no.permission=You do not have permission to download this file. +access.api.exception.version.not.found=Could not find requested dataset version. +access.api.exception.dataset.not.found=Could not find requested dataset. 
+ +#permission +permission.AddDataverse.label=AddDataverse +permission.AddDataset.label=AddDataset +permission.ViewUnpublishedDataverse.label=ViewUnpublishedDataverse +permission.ViewUnpublishedDataset.label=ViewUnpublishedDataset +permission.DownloadFile.label=DownloadFile +permission.EditDataverse.label=EditDataverse +permission.EditDataset.label=EditDataset +permission.ManageDataversePermissions.label=ManageDataversePermissions +permission.ManageDatasetPermissions.label=ManageDatasetPermissions +permission.PublishDataverse.label=PublishDataverse +permission.PublishDataset.label=PublishDataset +permission.DeleteDataverse.label=DeleteDataverse +permission.DeleteDatasetDraft.label=DeleteDatasetDraft + +permission.AddDataverse.desc=Add a dataverse within another dataverse +permission.DeleteDatasetDraft.desc=Delete a dataset draft +permission.DeleteDataverse.desc=Delete an unpublished dataverse +permission.PublishDataset.desc=Publish a dataset +permission.PublishDataverse.desc=Publish a dataverse +permission.ManageDatasetPermissions.desc=Manage permissions for a dataset +permission.ManageDataversePermissions.desc=Manage permissions for a dataverse +permission.EditDataset.desc=Edit a dataset's metadata +permission.EditDataverse.desc=Edit a dataverse's metadata, facets, customization, and templates +permission.DownloadFile.desc=Download a file +permission.ViewUnpublishedDataset.desc=View an unpublished dataset and its files +permission.ViewUnpublishedDataverse.desc=View an unpublished dataverse +permission.AddDataset.desc=Add a dataset to a dataverse + +packageDownload.title=Package File Download +packageDownload.instructions=Use the Download URL in a Wget command or a download manager to download this package file. Download via web browser is not recommended. User Guide - Downloading a Dataverse Package via URL +packageDownload.urlHeader=Download URL + +#mydata_fragment.xhtml +Published=Published +Unpublished=Unpublished +Draft=Draft +In\u0020Review=In Review +Deaccessioned=Deaccessioned + +#Managegroupspage.java +dataverse.manageGroups.user=user +dataverse.manageGroups.users=users +dataverse.manageGroups.group=group +dataverse.manageGroups.groups=groups +dataverse.manageGroups.nomembers=No Members +dataverse.manageGroups.unknown=unknown +dataverse.manageGroups.User=User +dataverse.manageGroups.Group=Group + +#editFilesFragment.xhtml +editfilesfragment.mainlabel=Select Language Encoding... 
+editfilesfragment.label1=West European +editfilesfragment.label1.item1=Western (ISO-8859-1) +editfilesfragment.label1.item2=Western (ISO-8859-15) +editfilesfragment.label1.item3=Western (Windows-1252) +editfilesfragment.label1.item4=Western (MacRoman) +editfilesfragment.label1.item5=Western (IBM-850) +editfilesfragment.label1.item6=Celtic (ISO-8859-14) +editfilesfragment.label1.item7=Greek (ISO-8859-7) +editfilesfragment.label1.item8=Greek (Windows-1253) +editfilesfragment.label1.item9=Greek (MacGreek) +editfilesfragment.label1.item10=Icelandic (MacIcelandic) +editfilesfragment.label1.item11=Nordic (ISO-8859-10) +editfilesfragment.label1.item12=South European (ISO-8859-3) +editfilesfragment.label2=East European +editfilesfragment.label2.item1=Baltic (ISO-8859-4) +editfilesfragment.label2.item2=Baltic (ISO-8859-13) +editfilesfragment.label2.item3=Baltic (Windows-1257) +editfilesfragment.label2.item4=Cyrillic (ISO-8859-5) +editfilesfragment.label2.item5=Cyrillic (ISO-IR-111) +editfilesfragment.label2.item6=Cyrillic (Windows-1251) +editfilesfragment.label2.item7=Cyrillic (MacCyrillic) +editfilesfragment.label2.item8=Cyrillic/Ukrainian (MacUkrainian) +editfilesfragment.label2.item9=Cyrillic (KOI8-R) +editfilesfragment.label2.item10=Cyrillic/Ukrainian (KOI8-U) +editfilesfragment.label2.item11=Croatian (MacCroatian) +editfilesfragment.label2.item12=Romanian (MacRomanian) +editfilesfragment.label2.item13=Romanian (ISO-8859-16) +editfilesfragment.label2.item14=Central European (ISO-8859-2) +editfilesfragment.label2.item15=Central European (Windows-1250) +editfilesfragment.label2.item16=Central European (MacCE) +editfilesfragment.label2.item17=Cyrillic (IBM-855) +editfilesfragment.label3=East Asian +editfilesfragment.label3.item1=Japanese (ISO-2022-JP) +editfilesfragment.label3.item2=Japanese (Shift_JIS) +editfilesfragment.label3.item3=Japanese (EUC-JP) +editfilesfragment.label3.item4=Chinese Traditional (Big5) +editfilesfragment.label3.item5=Chinese Traditional (Big5-HKSCS) +editfilesfragment.label3.item6=Chinese Traditional (EUC-TW) +editfilesfragment.label3.item7=Chinese Simplified (GB2312) +editfilesfragment.label3.item8=Chinese Simplified (HZ) +editfilesfragment.label3.item9=Chinese Simplified (GBK) +editfilesfragment.label3.item10=Chinese Simplified (ISO-2022-CN) +editfilesfragment.label3.item11=Korean (EUC-KR) +editfilesfragment.label3.item12=Korean (JOHAB) +editfilesfragment.label3.item13=Korean (ISO-2022-KR) +editfilesfragment.label4=Unicode +editfilesfragment.label4.item1=Unicode (UTF-8) +editfilesfragment.label4.item2=Unicode (UTF-16LE) +editfilesfragment.label4.item3=Unicode (UTF-16BE) +editfilesfragment.label5=US-ASCII + +isrequired={0} is required. +isrequired.conditional={0} is required if you choose to enter a value in any of the optional {1} fields. +draftversion=DRAFT VERSION +deaccessionedversion=DEACCESSIONED VERSION + +not_restricted=Not Restricted +editdatafilepage.defaultLanguageEncoding=UTF8 (default) +passwdVal.passwdReq.each=each +passwdVal.passwdReq.uppercase=uppercase +passwdVal.passwdReq.lowercase=lowercase +passwdVal.passwdReq.letter=letter +passwdVal.passwdReq.numeral=numeral +passwdVal.passwdReq.special=special +dataretrieverAPI.noMsgResultsFound=Sorry, no results were found. + +#xlsxfilereader.java +xlsxfilereader.ioexception.parse=Could not parse Excel/XLSX spreadsheet. {0} +xlsxfilereader.ioexception.norows=No rows of data found in the Excel (XLSX) file. +xlsxfilereader.ioexception.onlyonerow=Only one row of data (column name header?) 
detected in the Excel (XLSX) file. +xlsxfilereader.ioexception.failed=Failed to read line {0} during the second pass. +xlsxfilereader.ioexception.mismatch=Reading mismatch, line {0} during the second pass: {1} delimited values expected, {2} found. +xlsxfilereader.ioexception.linecount=Mismatch between line counts in first and final passes! + +#rtabfileparser.java +rtabfileparser.ioexception.failed=Failed to read line {0} of the Data file. +rtabfileparser.ioexception.mismatch=Reading mismatch, line {0} of the Data file: {1} delimited values expected, {2} found. +rtabfileparser.ioexception.boolean=Unexpected value for the Boolean variable ({0}): +rtabfileparser.ioexception.read=Couldn't read Boolean variable ({0})! +rtabfileparser.ioexception.parser1=R Tab File Parser: Could not obtain varQnty from the dataset metadata. +rtabfileparser.ioexception.parser2=R Tab File Parser: varQnty=0 in the dataset metadata! + +#ConfigureFragmentBean.java +configurefragmentbean.apiTokenGenerated=API Token will be generated. Please keep it secure as you would do with a password. + +#FacetCategory - staticSearchFields +staticSearchFields.dvCategory=Dataverse Category +staticSearchFields.metadataSource=Metadata Source +staticSearchFields.publicationDate=Publication Year +staticSearchFields.fileTypeGroupFacet=File Type +staticSearchFields.dvObjectType=Type +staticSearchFields.fileTag=File Tag +staticSearchFields.fileAccess=Access +staticSearchFields.publicationStatus=Publication Status +staticSearchFields.subject_ss=Subject + +#dataverse category - Facet Labels +Researcher=Researcher +Research\u0020Project=Research Project +Journal=Journal +Organization\u0020or\u0020Institution=Organization or Institution +Teaching\u0020Course=Teaching Course +Research\u0020Group=Research Group +Laboratory=Laboratory +Department=Department +Uncategorized=Uncategorized + +#filetype - Facet Labels +Document=Document +Text=Text +Tabular\u0020Data=Tabular Data +Data=Data +FITS=FITS +Shape=Shape +Image=Image +Network\u0020Data=Network Data +Unknown=Unknown +Documentation=Documentation +Code=Code +Archive=Archive +Audio=Audio +Video=Video + +#access - Facet Labels +Public=Public +Restricted=Restricted + + +#Shibboleth login +idp.fatal.divMissing=
specified as "insertAtDiv" could not be located in the HTML +idp.fatal.noXMLHttpRequest=Browser does not support XMLHttpRequest, unable to load IdP selection data +idp.fatal.wrongProtocol=Policy supplied to DS was not "urn:oasis:names:tc:SAML:profiles:SSO:idpdiscovery-protocol:single" +idp.fatal.wrongEntityId=entityId supplied by SP did not match configuration +idp.fatal.noData=Metadata download returned no data +idp.fatal.loadFailed=Failed to download metadata from +idp.fatal.noparms=No parameters to discovery session and no defaultReturn parameter configured +idp.fatal.noReturnURL=No URL return parameter provided +idp.fatal.badProtocol=Return request must start with https:// or http:// +idp.idpPreferred.label=Use a previous selection: +idp.idpEntry.label=Or enter your institution's name. +idp.idpEntry.NoPreferred.label=Enter your institution's name and click "Continue" to log in via your institution's authentication system. +idp.idpList.label=Or select your institution from the list below. +idp.idpList.NoPreferred.label=Select your institution and click "Continue" to log in via your institution's authentication system. +idp.idpList.defaultOptionLabel=Please select... +idp.idpList.showList=Allow me to pick from a list +idp.idpList.showSearch=Allow me to type the name of my institution +idp.submitButton.label=Continue +idp.helpText=Help +idp.defaultLogoAlt= + +#externaltools +externaltools.dct.displayname=Data Curation Tool +externaltools.dct.description=Data Curation Tool for curation of variables +externaltools.explorer.displayname=Data Explorer +externaltools.explorer.description=The Data Explorer provides a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. +externaltools.textPreviewer.displayname=Read Text +externaltools.textPreviewer.description=Read the text file. +externaltools.htmlPreviewer.displayname=View Html +externaltools.htmlPreviewer.description=View the html file. +externaltools.audioPreviewer.displayname=Play Audio +externaltools.audioPreviewer.description=Listen to an audio file. +externaltools.imagePreviewer.displayname=View Image +externaltools.imagePreviewer.description=Preview an image file. +externaltools.pdfPreviewer.displayname=Read Document +externaltools.pdfPreviewer.description=Read a pdf document. +externaltools.videoPreviewer.displayname=Play Video +externaltools.videoPreviewer.description=Watch a video file. +externaltools.spreadsheetPreviewer.displayname=View Data +externaltools.spreadsheetPreviewer.description=View the spreadsheet data. +externaltools.stataPreviewer.displayname=View Stata File +externaltools.stataPreviewer.description=View the Stata file as text. +externaltools.rPreviewer.displayname=View R file +externaltools.rPreviewer.description=View the R file as text. +externaltools.annotationPreviewer.displayname=View Annotations +externaltools.annotationPreviewer.description=View the annotation entries in a file. +externaltools.mapPreviewer.displayname=View Map +externaltools.mapPreviewer.description=View a map of the file. +externaltools.zipPreviewer.displayname=Preview Zip file +externaltools.zipPreviewer.description=Preview the structure of a Zip file. +externaltools.ncmlPreviewer.displayname=Show NcML (XML) +externaltools.ncmlPreviewer.description=Metadata from NetCDF files. +externaltools.HDF5Preview.displayname=H5Web +externaltools.HDF5Preview.description=Metadata from HDF5 files. 
+externaltools.mdPreviewer.displayname=Show Markdown (MD) +externaltools.mdPreviewer.description=View the Markdown file. +externaltools.mapShpPreviewer.displayname=View Map +externaltools.mapShpPreviewer.description=View a map of the file. +externaltools.richHtmlPreviewer.displayname=Rich HTML Previewer +externaltools.richHtmlPreviewer.description=View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. Plotly +externaltools.rocratePreviewer.displayname=Show RO-Crate +externaltools.rocratePreviewer.description=View the RO-Crate metadata file. + + +# api/admin/datasetfield/load +api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3}) +api.admin.datasetfield.load.GeneralErrorMessage=Error parsing metadata block in {0} part, line #{1}: {2} + +#PIDs +pids.api.reservePid.success=PID reserved for {0} +pids.api.deletePid.success=PID deleted for {0} +pids.deletePid.failureExpected=Unable to delete PID {0}. Status code: {1}. +pids.deletePid.failureOther=Problem deleting PID {0}: {1} +pids.commands.reservePid.failure=Problem reserving PID for dataset id {0}: {1}. +pids.datacite.errors.noResponseCode=Problem getting HTTP status code from {0}. Is it in DNS? Is doi.dataciterestapiurlstring configured properly? +pids.datacite.errors.DoiOnly=Only doi: is supported. + +#PublishDatasetCommand +publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persistent identifier has not been reserved. + +# APIs +api.errors.invalidApiToken=Invalid API token. diff --git a/distros/dataverse.no/modification/Bundle.properties.patch b/distros/dataverse.no/modification/Bundle.properties.patch new file mode 100644 index 0000000..51b3a0c --- /dev/null +++ b/distros/dataverse.no/modification/Bundle.properties.patch @@ -0,0 +1,57 @@ +--- Bundle.properties 2021-08-04 19:13:08.000000000 +0000 ++++ /root/git/dataverse-docker/distros/dataverse.no/modification/Bundle.properties 2022-07-12 10:41:34.201813777 +0000 +@@ -303,8 +303,8 @@ + login.forgot.text=Forgot your password? + login.builtin=Dataverse Account + login.institution=Institutional Account +-login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. +-login.institution.support.blurbwithLink=Leaving your institution? Please contact {0} for assistance. ++login.institution.blurb=Log in or sign up with your institutional account — more information about account creation. ++login.institution.support.blurbwithLink=Leaving your institution? Please contact DataverseNO for assistance. + login.builtin.credential.usernameOrEmail=Username/Email + login.builtin.credential.password=Password + login.builtin.invalidUsernameEmailOrPassword=The username, email address, or password you entered is invalid. Need assistance accessing your account? +@@ -2640,6 +2640,43 @@ + externaltools.dct.description=Data Curation Tool for curation of variables + externaltools.explorer.displayname=Data Explorer + externaltools.explorer.description=The Data Explorer provides a GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. ++externaltools.textPreviewer.displayname=Read Text ++externaltools.textPreviewer.description=Read the text file. ++externaltools.htmlPreviewer.displayname=View Html ++externaltools.htmlPreviewer.description=View the html file. 
++externaltools.audioPreviewer.displayname=Play Audio ++externaltools.audioPreviewer.description=Listen to an audio file. ++externaltools.imagePreviewer.displayname=View Image ++externaltools.imagePreviewer.description=Preview an image file. ++externaltools.pdfPreviewer.displayname=Read Document ++externaltools.pdfPreviewer.description=Read a pdf document. ++externaltools.videoPreviewer.displayname=Play Video ++externaltools.videoPreviewer.description=Watch a video file. ++externaltools.spreadsheetPreviewer.displayname=View Data ++externaltools.spreadsheetPreviewer.description=View the spreadsheet data. ++externaltools.stataPreviewer.displayname=View Stata File ++externaltools.stataPreviewer.description=View the Stata file as text. ++externaltools.rPreviewer.displayname=View R file ++externaltools.rPreviewer.description=View the R file as text. ++externaltools.annotationPreviewer.displayname=View Annotations ++externaltools.annotationPreviewer.description=View the annotation entries in a file. ++externaltools.mapPreviewer.displayname=View Map ++externaltools.mapPreviewer.description=View a map of the file. ++externaltools.zipPreviewer.displayname=Preview Zip file ++externaltools.zipPreviewer.description=Preview the structure of a Zip file. ++externaltools.ncmlPreviewer.displayname=Show NcML (XML) ++externaltools.ncmlPreviewer.description=Metadata from NetCDF files. ++externaltools.HDF5Preview.displayname=H5Web ++externaltools.HDF5Preview.description=Metadata from HDF5 files. ++externaltools.mdPreviewer.displayname=Show Markdown (MD) ++externaltools.mdPreviewer.description=View the Markdown file. ++externaltools.mapShpPreviewer.displayname=View Map ++externaltools.mapShpPreviewer.description=View a map of the file. ++externaltools.richHtmlPreviewer.displayname=Rich HTML Previewer ++externaltools.richHtmlPreviewer.description=View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. Plotly ++externaltools.rocratePreviewer.displayname=Show RO-Crate ++externaltools.rocratePreviewer.description=View the RO-Crate metadata file. ++ + + # api/admin/datasetfield/load + api.admin.datasetfield.load.ArrayIndexOutOfBoundMessage=Error parsing metadata block in {0} part, line #{1}: missing ''{2}'' column (#{3}) \ No newline at end of file diff --git a/distros/dataverse.no/modification/analytics.xhtml b/distros/dataverse.no/modification/analytics.xhtml new file mode 100644 index 0000000..a9d644b --- /dev/null +++ b/distros/dataverse.no/modification/analytics.xhtml @@ -0,0 +1,24 @@ + + + + + diff --git a/distros/dataverse.no/modification/custom-footer.html b/distros/dataverse.no/modification/custom-footer.html new file mode 100644 index 0000000..15dd63d --- /dev/null +++ b/distros/dataverse.no/modification/custom-footer.html @@ -0,0 +1,387 @@ + + + + + diff --git a/distros/dataverse.no/modification/custom-header.html b/distros/dataverse.no/modification/custom-header.html new file mode 100644 index 0000000..dc606a2 --- /dev/null +++ b/distros/dataverse.no/modification/custom-header.html @@ -0,0 +1,34 @@ + +
+ [custom-header.html markup not preserved in this excerpt; the header displays the banner text "Test and Demo only"]
diff --git a/distros/dataverse.no/modification/dataverse_footer.xhtml b/distros/dataverse.no/modification/dataverse_footer.xhtml new file mode 100644 index 0000000..d8b625b --- /dev/null +++ b/distros/dataverse.no/modification/dataverse_footer.xhtml @@ -0,0 +1,93 @@ + +
diff --git a/distros/dataverse.no/modification/dataverse_header.xhtml b/distros/dataverse.no/modification/dataverse_header.xhtml new file mode 100644 index 0000000..d05f27e --- /dev/null +++ b/distros/dataverse.no/modification/dataverse_header.xhtml @@ -0,0 +1,408 @@ + + + + + + + + + + + + + + + + + + +
diff --git a/distros/dataverse.no/modification/loginpage.xhtml b/distros/dataverse.no/modification/loginpage.xhtml new file mode 100644 index 0000000..be00efa --- /dev/null +++ b/distros/dataverse.no/modification/loginpage.xhtml @@ -0,0 +1,238 @@ + + + + + + + + + + + + + + + + + + +
+ [loginpage.xhtml markup not preserved in this excerpt; the login page includes the "#{bundle['auth.providers.title']}" heading]
+ diff --git a/distros/dataverse.no/runOnce/055-dvwebloader.sh b/distros/dataverse.no/runOnce/055-dvwebloader.sh new file mode 100644 index 0000000..bf2f57f --- /dev/null +++ b/distros/dataverse.no/runOnce/055-dvwebloader.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +export PGPASSWORD=`cat /secrets/db/password` +psql -U dataverse dataverse -h postgres -f ${INIT_SCRIPTS_FOLDER}/affiliations/webloader_check.sql -o /tmp/output +EXIST=`grep Dataverse /tmp/output` + +wget https://github.com/DataverseNO/dvwebloader/archive/refs/heads/main.zip -O /tmp/dvwebloader.zip +unzip -o /tmp/dvwebloader.zip -d $DOCROOT_DIR/logos + +if [[ -z $EXIST ]]; then +echo "Loaded" +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +"{ + \"displayName\": \"Dataverse WebLoader\", + \"description\": \"Upload all the files in a local directory!\", + \"toolName\": \"dvwebloader\", + \"scope\": \"dataset\", + \"contentType\":\"text/plain\", + \"types\": [ + \"explore\" + ], + \"toolUrl\": \"https://${hostname}/logos/dvwebloader-main/src/dvwebloader.html\", + \"toolParameters\": { + \"queryParameters\": [ + { + \"siteUrl\": \"{siteUrl}\" + }, + { + \"datasetPid\": \"{datasetPid}\" + }, + { + \"key\": \"{apiToken}\" + } + ] + } +}" +fi diff --git a/distros/dataverse.no/runOnce/crontab.sh b/distros/dataverse.no/runOnce/crontab.sh new file mode 100644 index 0000000..9680658 --- /dev/null +++ b/distros/dataverse.no/runOnce/crontab.sh @@ -0,0 +1,13 @@ +#!/bin/bash +mkdir /mntblob/databaseDumps/ +chown omsagent /mntblob/databaseDumps/ +usermod -aG docker omsagent +cp -r /distib/private/.ssh /var/opt/microsoft/omsagent/run +chown -R omsagent /var/opt/microsoft/omsagent/run/.ssh +#0 1 * * * /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/backupData.sh" >> /var/log/bakupslogs.log +#0 0 * * * /distrib/dataverse-docker/distros/dataverse.no/init.d/cronjob/dumpdatabase.sh +#*/2 * * * * /bin/bash /distrib/dataverse-docker/restart-dataverse.sh https://dataverse.no >> /var/log/restartlogs.log +#0 16 * * 3 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_on.sh" +#0 06 * * 4 su root /distrib/dataverse-docker/update.sh +#30 06 * * 4 /usr/bin/docker exec dataverse "/opt/payara/init.d/cronjob/maintenance_notification_off.sh" +#0 21 * * * docker exec dataverse bash -c "curl -X POST http://localhost:8080/api/admin/sitemap" diff --git a/distros/dataverse.no/runOnce/previewers.sh b/distros/dataverse.no/runOnce/previewers.sh new file mode 100644 index 0000000..abca01b --- /dev/null +++ b/distros/dataverse.no/runOnce/previewers.sh @@ -0,0 +1,1464 @@ +#!/bin/bash + + +#https://raw.githubusercontent.com/DataverseNO/dataverse-previewers/develop/6.1curlcommands.md +# Example Curl Commands to register previewers for Dataverse, version 5.13+ + +for id in $(curl -s http://localhost:8080/api/admin/externalTools | jq -r .data[].id); do curl -X DELETE "http://localhost:8080/api/admin/externalTools/$id";done + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Read Text", + "description":"Read the text file.", + "toolName":"textPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + 
"contentType":"text/plain", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Html", + "description":"View the html file.", + "toolName":"htmlPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HtmlPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/html", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Audio", + "description":"Listen to an audio file.", + "toolName":"audioPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"audio/mp3", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Audio", + "description":"Listen to an audio file.", + "toolName":"audioPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"audio/mpeg", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 
3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Audio", + "description":"Listen to an audio file.", + "toolName":"audioPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"audio/wav", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Audio", + "description":"Listen to an audio file.", + "toolName":"audioPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"audio/ogg", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Audio", + "description":"Listen to an audio file.", + "toolName":"audioPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/AudioPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"audio/x-m4a", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Image", + 
"description":"Preview an image file.", + "toolName":"imagePreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"image/gif", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Image", + "description":"Preview an image file.", + "toolName":"imagePreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"image/jpeg", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Image", + "description":"Preview an image file.", + "toolName":"imagePreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ImagePreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"image/png", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Read Document", + "description":"Read a pdf document.", + "toolName":"pdfPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/PDFPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + 
{"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/pdf", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Video", + "description":"Watch a video file.", + "toolName":"videoPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"video/mp4", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Video", + "description":"Watch a video file.", + "toolName":"videoPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"video/ogg", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Play Video", + "description":"Watch a video file.", + "toolName":"videoPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/VideoPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"video/quicktime", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": 
"downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Data", + "description":"View the spreadsheet data.", + "toolName":"spreadsheetPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/comma-separated-values", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Data", + "description":"View the spreadsheet data.", + "toolName":"spreadsheetPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/tab-separated-values", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Data", + "description":"View the spreadsheet data.", + "toolName":"spreadsheetPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/csv", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": 
"/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Data", + "description":"View the spreadsheet data.", + "toolName":"spreadsheetPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/SpreadsheetPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/tsv", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Stata File", + "description":"View the Stata file as text.", + "toolName":"stataPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/x-stata-syntax", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View R file", + "description":"View the R file as text.", + "toolName":"rPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/TextPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"type/x-r-syntax", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Annotations", + "description":"View the annotation entries in a 
file.", + "toolName":"annotationPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HypothesisPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/x-json-hypothesis", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Map", + "description":"View a map of the file.", + "toolName":"mapPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/geo+json", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### MapViewer: + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Map", + "description":"View a map of the file.", + "toolName":"mapPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/geo+json", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### ZIP Previewer: + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Preview Zip file", + "description":"Preview the structure of a Zip file.", + "toolName":"zipPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ZipPreview.html", + "toolParameters": { + "queryParameters":[ + 
{"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/zip", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Preview ELN file", + "description":"Preview the structure of an ELN Archive.", + "toolName":"zipPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ZipPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/vnd.eln+zip", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### NcML Previewer: + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Show NcML (XML)", + "description":"Metadata from HDF5 files.", + "toolName":"ncmlPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/NcmlPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "requirements": { + "auxFilesExist": [ + { + "formatTag": "NcML", + "formatVersion": "0.1" + } + ] + }, + "contentType":"application/x-hdf5", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Show NcML (XML)", + "description":"Metadata from NetCDF files.", + "toolName":"ncmlPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/NcmlPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + 
{"locale":"{localeCode}"} + ] + }, + "requirements": { + "auxFilesExist": [ + { + "formatTag": "NcML", + "formatVersion": "0.1" + } + ] + }, + "contentType":"application/netcdf", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### H5Web Previewer for HDF5 and NetCDF files: + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"H5Web", + "description":"Explore and visualize HDF5 files", + "toolName":"HDF5Preview", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HDF5Preview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/x-hdf5", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"H5Web", + "description":"Explore and visualize HDF5 files", + "toolName":"HDF5Preview", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/HDF5Preview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/netcdf", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### Markdown Previewer + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Show Markdown (MD)", + "description":"View the Markdown file.", + "toolName":"mdPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MdPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/markdown", + "allowedApiCalls": [ + { + "name": 
"retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + +### ESRI Shape Previewer (beta) + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Map", + "description":"View a map of the file.", + "toolName":"mapShpPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapShpPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/zipped-shapefile", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### GeoTIFF Previewer (beta) + + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"View Map", + "description":"View a map of the file.", + "toolName":"mapShpPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/MapRasterPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"image/tiff", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + +### Rich HTML Previewer - Potential Issues if used with malicious content. + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Rich HTML Previewer", + "description":"View the html file and run potentially malicious JavaScript. Useful for interactive HTML files that use e.g. 
Plotly", + "toolName":"richHtmlPreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/RichHtmlPreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"text/html", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + + +### RO-Crate Previewer (beta) + +curl -X POST -H 'Content-type: application/json' http://localhost:8080/api/admin/externalTools -d \ +'{ + "displayName":"Show RO-Crate", + "description":"View the RO-Crate metadata file.", + "toolName":"rocratePreviewer", + "scope":"file", + "types":["preview"], + "toolUrl":"https://dataverseno.github.io/dataverse-previewers/previewers/v1.4/ROCratePreview.html", + "toolParameters": { + "queryParameters":[ + {"fileid":"{fileId}"}, + {"siteUrl":"{siteUrl}"}, + {"datasetid":"{datasetId}"}, + {"datasetversion":"{datasetVersion}"}, + {"locale":"{localeCode}"} + ] + }, + "contentType":"application/ld+json; profile=\"http://www.w3.org/ns/json-ld#flattened http://www.w3.org/ns/json-ld#compacted https://w3id.org/ro/crate\"", + "allowedApiCalls": [ + { + "name": "retrieveFileContents", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=true", + "timeOut": 3600 + }, + { + "name": "downloadFile", + "httpMethod": "GET", + "urlTemplate": "/api/v1/access/datafile/{fileId}?gbrecs=false", + "timeOut": 3600 + }, + { + "name": "getDatasetVersionMetadata", + "httpMethod": "GET", + "urlTemplate": "/api/v1/datasets/{datasetId}/versions/{datasetVersion}", + "timeOut": 3600 + } + ] +}' + + diff --git a/distros/dataverse.no/runOnce/readETAGLogFileAndCopy.sh b/distros/dataverse.no/runOnce/readETAGLogFileAndCopy.sh new file mode 100644 index 0000000..7c8e903 --- /dev/null +++ b/distros/dataverse.no/runOnce/readETAGLogFileAndCopy.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +cp -r /secrets/aws-cli/.aws ~ + +# AccessURL="[AZURE_BLOB_URL]" +OGINALBaseFolder="/dataCorrect/dataverse-files" +BaseFolder="/dataverse/dataverse-files" + +#BASEURL="https://....blob.core.windows.net/data1" +FILEPATH="/dataCorrect/dataverse-files/" +LogFile="./checkETAG_2024.log" +LogFile2="./checkETAG_not_copy.log" + +S3URLAWS="s3://URL/" + + +while true; do + + if [ -f "${LogFile}" ]; then + line=$(head -n 1 "${LogFile}") + + IFS=':' read -r -a arrayFerst <<< "$line" + + if [ "is not equal" == "${arrayFerst[0]}" ]; then + + IFS=" -- " read -r -a arraySecend <<< "${arrayFerst[1]}" + FileCopy="${arraySecend[0]}" + CheckMd5Database="${arraySecend[1]}" + + if [ ! -z "${OGINALBaseFolder}/${FileCopy}" ]; then + md5BlobBase64=$(curl -s "${BASEURL}${FILEPATH}${FileCopy}${KEYWINDOWSBLOB}" -I -q | grep "Content-MD5: " | awk '{ print $2 }' | base64 -di) + + if [ $? 
-eq 0 ]; then + md5Blob=$(echo -n "$md5BlobBase64" | xxd -p) + if [ "${CheckMd5Database}" == "${md5Blob}" ]; then + + cp -fa ${OGINALBaseFolder}${FileCopy} ${BaseFolder}${FileCopy} + aws s3 cp ${OGINALBaseFolder}${FileCopy} ${S3URLAWS}${FileCopy} --recursive + + else + echo -n " original file has a different md5 -> " >> "${LogFile2}" + head -n 1 "${LogFile}" >> "${LogFile2}" + + fi + else + echo -n " original blob error -> " >> "${LogFile2}" + head -n 1 "${LogFile}" >> "${LogFile2}" + fi + else + echo -n " file not in original blob -> " >> "${LogFile2}" + head -n 1 "${LogFile}" >> "${LogFile2}" + fi + else + echo -n " file not in blob -> " >> "${LogFile2}" + head -n 1 "${LogFile}" >> "${LogFile2}" + fi + + sed '1d' "${LogFile}" > "${LogFile}.tmp" + mv "${LogFile}.tmp" "${LogFile}" + + if [ ! -s "${LogFile}" ]; then + rm "${LogFile}" + exit 0 + fi + fi +done \ No newline at end of file
diff --git a/distros/dataverse.no/runOnce/update_5.14.sh new file mode 100644 index 0000000..e647856 --- /dev/null +++ b/distros/dataverse.no/runOnce/update_5.14.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +echo "Enable File PID:" +curl -X PUT -d 'true' http://localhost:8080/api/admin/settings/:FilePIDsEnabled +echo "PID finish" + +echo "Download and load the citation.tsv file" +curl -s https://github.com/IQSS/dataverse/releases/download/v5.14/citation.tsv -o /tmp/citation.tsv +curl http://localhost:8080/api/admin/datasetfield/load -X POST --data-binary @/tmp/citation.tsv -H "Content-type: text/tab-separated-values" +echo "citation.tsv finish" + +# Ticket #65 +curl -X PUT -d 0 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit + +rm /tmp/citation.tsv \ No newline at end of file
diff --git a/doc/README.rst b/doc/README.rst new file mode 100644 index 0000000..a9de6af --- /dev/null +++ b/doc/README.rst @@ -0,0 +1,43 @@ +The following documentation will guide you through building a custom Docker container with a Dataverse project on the Microsoft Azure Cloud. DataverseNO uses Docker containers to manage Dataverse deployment and updates. + +Motivation for DataverseNO Docker Container +------------------------------------------- + +Let's assume that you want to deploy an application to a server. In your local test system, the application works just fine without any problem. But when you deploy the same application onto another server for production or demo, boom! Your application does not work anymore. Many factors can contribute to this: operating system compatibility, different library versions, and so on. As a result, your application cannot be deployed successfully and you face a lot of challenges. +Docker helps remove these incompatibility problems. + +This documentation shows you how to use Docker to containerize your Dataverse application so that you can run it on any server regardless of the operating system. DataverseNO testing was done on Ubuntu on the Microsoft Azure Cloud. + + +Requirements & Prerequisites +---------------------------- + +SSH to a working VM as the administrator and make sure that you have sudo rights to install software. + +The DataverseNO Docker Container platform stands on the shoulders of many other software projects. 
Before running it, the following must be installed and/or configured: + +- Authentication integration options (Login to DataverseNO): + + - [FEIDE SAML / OpenID](https://www.feide.no) - FEIDE is a Norwegian government solution for secure identification in the education sector. We use FEIDE SAML for identity management and single sign-on (SSO). FEIDE SAML activates single sign-on (SSO) for our Dataverse application. + - Azure OpenID - OpenID Connect is a security-token based extension of the OAuth 2.0 authorization protocol to do single sign-on. Azure OpenID supports single sign-on and API access to the Dataverse application. + - [ORCID openID](https://info.orcid.org/ufaqs/) - ORCID openID provides individual researchers and scholars with a persistent unique identifier. ORCID iDs enable reuse of items in new contexts by making connections between items from the same author in different places. Authentication with ORCID is supported in Dataverse. Registration for the production Members API service is open to ORCID member organizations only. UiT is an organisation member and registered with ORCID. + - [eduGAIN](https://edugain.org) - The eduGAIN interfederation service connects identity federations around the world, simplifying access to content, services and resources for the global research and education community. +- SMTP server - Used to send and relay outgoing email between DataverseNO (sender) and receivers. We use our UiT SMTP server. + +- Storage + + - VM server - Stores the Dataverse application, custom scripts and configuration files. + - Cloudian S3 storage - Used for storing dataset files. Cloudian provides exabyte-scale storage for capacity-intensive workloads: S3-compatible storage for on-prem, hybrid cloud, and multi-cloud. + - Blob storage (mounted on the VM) + +- [Docker and Docker-compose](https://www.docker.com/) - Docker is an open platform that allows developing, shipping, and running applications by using containers (i.e. packages containing all the parts an application needs to function, such as libraries and dependencies). Containers are isolated from each other and bundle their own software, libraries, and configuration files; they can communicate with each other through well-defined channels. Docker run is entirely command-line based and only starts one container at a time; docker-compose reads configuration data from a YAML file and runs multiple containers. + + Steps needed for the implementation of the DataverseNO Docker Container (see the sketch after this list): + + 1) Installation of Docker on a VM machine, + 2) Creation of the Dockerfile, and + 3) Building images and development of the Dockerfile. + +* [Git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - Git is a distributed version control system that tracks changes in any set of computer files, usually used for coordinating work among programmers collaboratively developing source code during software development. DataverseNO uses git to track the changes made to files. You can create a new project/repo, or a repo from an existing project if that has not been done yet. + ++ [Azure-cli](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) - The Azure Command-Line Interface (CLI) is a cross-platform command-line tool to connect to Azure and execute administrative commands on Azure resources. It allows the execution of commands through a terminal using interactive command-line prompts or a script. 
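+
+A minimal sketch of the resulting workflow is shown below. It assumes the repository has been cloned to ``/distrib/dataverse-docker`` (the path used by the maintenance scripts in this repository) and that ``.env`` has already been filled in; adjust the path to your own installation.
+
+.. code-block:: bash
+
+   # work from the cloned deployment repository (the path is an assumption)
+   cd /distrib/dataverse-docker
+
+   # docker-compose reads .env from the current directory and starts all containers
+   docker-compose up -d
+
+   # follow the Dataverse container logs to confirm the application comes up
+   docker logs -f dataverse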
diff --git a/doc/Troubleshooting b/doc/Troubleshooting new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/doc/Troubleshooting @@ -0,0 +1 @@ + 
diff --git a/doc/customization.rst b/doc/customization.rst new file mode 100644 index 0000000..c7e4764 --- /dev/null +++ b/doc/customization.rst @@ -0,0 +1,24 @@ +Login page +---------- + +init.d/204-custumisation.sh + +Header +------ + +init.d/204-custumisation.sh + +Footer +------ + +init.d/204-custumisation.sh + +Analytics (Matomo) +------------------ + +init.d/100-analytics.sh + +Dataverse support form changed to email +--------------------------------------- + +init.d/201-bundle.sh
diff --git a/doc/env.rst b/doc/env.rst new file mode 100644 index 0000000..41678db --- /dev/null +++ b/doc/env.rst @@ -0,0 +1,154 @@ +Environment variables +===================== + +To run Dataverse as a completely operational production service, data providers should fill in all settings in the configuration file, which contains information about their domain name, DOI settings, the language of the web interface, the mail relay, external controlled vocabularies and storage. There is also the possibility to integrate Docker-based custom services in the infrastructure and to create their own software packages serving the needs of specific data providers, for example to integrate a separate Shibboleth container for federated authentication, install a new data previewer or activate a data processing pipeline. + +Configuration +~~~~~~~~~~~~~ + +The configuration is managed in one central place, an environment variables file called .env, so administrators do not need to modify other files in the software package. It contains all settings required to deploy Dataverse, for example to set the language of the web interface and to establish connections to the local database, the SOLR search engine, the mail relay or external storage (see the sketch below). 
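+
+As a sketch of how a configuration change is applied in practice (assuming the stack is managed with docker-compose from the directory that contains ``.env``):
+
+.. code-block:: bash
+
+   # adjust the central configuration file
+   vi .env
+
+   # recreate the containers so that the changed environment variables take effect
+   docker-compose up -d --force-recreate
+
+   # spot-check that a value reached the Dataverse container
+   docker exec dataverse printenv DATAVERSE_DB_HOST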
+ + +Main configuration +------------------ + +CONFIGURATION_PATH=/distrib/private + +DOCROOT=/distrib + +VERSION=5.9.1 + +DOCKER_HUB=presacrd4oilmd5ss77y.azurecr.io/dataverseno + +SECRETS_DIR="${CONFIGURATION_PATH}/secrets" + +POSTGRESTMP=/mnt/tmp/postgres + + +Dataverse database settings +--------------------------- + +DATAVERSE_DB_HOST=postgres + +DATAVERSE_DB_USER=dataverse + +DATAVERSE_DB_PASSWORD=password + +DATAVERSE_DB_NAME=dataverse + +Solr +---- + +SOLR_SERVICE_HOST=solr:8983 + +SOLR_SERVICE_PORT=8983 + +DATAVERSE_URL=localhost:8080 + +DATAVERSE_SERVICE_HOST=localhost + +LOCAL_STORAGE=/mntblob + +Counter Processor +----------------- + +COUNTERPROSVERSION=0.1.04 + +GEOIPLICENSE=licencekey + +CONFIG_FILE=counter-processor-config.yaml + +Postgres settings +----------------- + +POSTGRES_USER=dataverse + +POSTGRES_PASSWORD= + +POSTGRES_SERVER=postgres + +POSTGRES_DATABASE=dataverse + +POSTGRES_DB=dataverse + +Domain configuration and init folder +------------------------------------ + +hostname=test-docker.dataverse.no + +traefikhost=test-docker.dataverse.no + +INIT_SCRIPTS_FOLDER=/opt/payara/init.d + +Webhook configuration to bundle external services +------------------------------------------------- + +WEBHOOK=/opt/payara/triggers/external-services.py + +#CESSDA=True + +#CLARIN=True + +DOI parameters +-------------- + +# https://guides.dataverse.org/en/latest/installation/config.html#doi-baseurlstring + +doi_authority=10.21337 + +doi_provider=DataCite + +doi_username=username + +doi_password=password + +dataciterestapiurlstring=https\:\/\/api.test.datacite.org + +baseurlstring=https\:\/\/mds.test.datacite.org + +AWS settings +------------ + +# https://guides.dataverse.org/en/latest/installation/config.html#id90 + +aws_bucket_name=2002-green-dataversenotest1 + +aws_s3_profile=cloudian + +aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +AWS UiT +--------- + +aws_uit_bucket_name=p-uit-dataverse01-sth + +aws_uit_s3_profile=uit + +#aws_endpoint_url=https\:\/\/s3-oslo.educloud.no + +Mail relay +---------- + +# https://guides.dataverse.org/en/latest/developers/troubleshooting.html + +system_email= + +mailhost=smtp-relay.exemple.com + +mailuser=no-reply@dataverse.no + +no_reply_email=no-reply@dataverse.no + +smtp_password=password + +smtp_port=465 + +socket_port=465 + +Federated authentication file +------------------------------- + +# https://guides.dataverse.org/en/latest/installation/shibboleth.html + +federated_json_file=/secrets/openid.json + diff --git a/doc/envFileSetup b/doc/envFileSetup new file mode 100644 index 0000000..f082cff --- /dev/null +++ b/doc/envFileSetup @@ -0,0 +1,46 @@ +The following variables need to be changed in .env depending on your installation + +.. code-block:: bash + + hostname=dataverse.azure.com + traefikhost=dataverse.azure.com + +Main configuration + +.. code-block:: bash + + DISTRIB=/distrib + CONFIGURATION_PATH=/distrib/private + +Solr + +.. code-block:: bash + + LOCAL_STORAGE=/mntblob + +Counter Processor + +.. code-block:: bash + + GEOIPLICENSE=licencekey + +Postgres settings + +.. code-block:: bash + + POSTGRES_PASSWORD=password + + +DOI parameters + +.. code-block:: bash + + doi_authority=10.21337 + doi_username=username + doi_password=password + +AWS + +..
code-block:: bash + + aws_bucket_name=bucketName diff --git a/doc/functionalityValidation.rst b/doc/functionalityValidation.rst new file mode 100644 index 0000000..3b297aa --- /dev/null +++ b/doc/functionalityValidation.rst @@ -0,0 +1,90 @@ +Default admin login +------------------- + +username : dataverseAdmin + +password : admin + + +S3 storage +---------- + +For testing purposes, S3 storage functionality can be disabled using: + +``mv /distrib/dataverse-docker/distros/dataverse.no/init.d/0*s3*.sh /tmp/`` + +then restart Dataverse. + +Mail relay +---------- + +in ``.env`` +~~~~~~~~~~~ + +Set ``system_email=`` + +For example ``system_email=000xxx@uit.no`` + + +Set the SMTP relay: + +.. code-block:: bash + + mailhost=smtp-relay.exemple.com + mailuser=no-reply@dataverse.no + no_reply_email=no-reply@dataverse.no + smtp_password=password + smtp_port=465 + socket_port=465 + +in the web interface +~~~~~~~~~~~~~~~~~~~~ +Change the administrator email at https:///dataverseuser.xhtml?selectTab=accountInfo + +The "Verify email" button should send an email. + +DOI settings +------------ + +in ``.env`` +~~~~~~~~~~~ + +Set the DOI configuration + +.. code-block:: bash + +in ``/secrets`` +~~~~~~~~~~~~~~~~ + +Set the password in ``$DISTRIB/private/secrets/doi_asadmin`` + +For example, with "changeme" as the password: ``AS_ADMIN_ALIASPASSWORD=changeme`` + +Set the password in ``$DISTRIB/private/secrets/doi/password`` + +For example, with "changeme" as the password: ``changeme`` + + +FEIDE authentication +-------------------- + +Local storage +------------- + +S3 support +---------- + +If S3 storage was disabled, re-enable it using: + + +``mv /tmp/0*s3*.sh /distrib/dataverse-docker/distros/dataverse.no/init.d/`` + +Large files +----------- + + +Counter processor +----------------- + +Custom settings +--------------- diff --git a/doc/installation.rst b/doc/installation.rst new file mode 100644 index 0000000..d44f551 --- /dev/null +++ b/doc/installation.rst @@ -0,0 +1,228 @@ +Dataverse installation on Microsoft Azure +========================================= + +Installation of docker, docker-compose, git and azure-cli +---------------------------------------------------------- + +Update APT sources +------------------ + +This needs to be done in order to access packages from the Docker repository. + +1. Log into your VM as a user with sudo or root privileges. + +2. In your terminal, switch to sudo or root privileges. + +3. Update package information, ensure that APT works with the https method, and that CA certificates are installed. + +.. code-block:: bash + + sudo su + apt-get update + apt-get install -y \ + ca-certificates \ + curl \ + azure-cli \ + gnupg \ + lsb-release + +4. Add Docker’s official GPG key: + +.. code-block:: bash + + curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg + +If you get the following message: "File '/usr/share/keyrings/docker-archive-keyring.gpg' exists. Overwrite? (y/N)", answer "y". + +Verify that the key fingerprint is, for example, 9DC8 5822 9FC7 DD38 854A E2D8 8D81 803C 0EBF CD88 + +.. code-block:: bash + + sudo apt-key fingerprint + +to see the keys in your trusted.gpg keyring. + +5. Find the entry in the table below which corresponds to your Ubuntu version. This determines +where APT will search for Docker packages. + +Run the following command, substituting the entry for your operating system for the placeholder. + +..
code-block:: bash + + echo \ + "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \ + $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null + +6. Update the APT package index by executing ``sudo apt-get update``. + +.. code-block:: bash + + apt-get update + +7. Verify that APT is pulling from the right repository. The version currently installed is marked with ***. + +.. code-block:: bash + + apt-cache policy docker-engine + +8. Install Docker Community Edition, docker-compose, git and azure-cli + +.. code-block:: bash + + apt-get install -y docker-ce docker-ce-cli containerd.io + curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose + chmod +x /usr/local/bin/docker-compose + apt-get install -y git azure-cli + +Dataverse root folder +--------------------- + +Create a folder for secrets and define it in ``CONFIGURATION_PATH`` and ``DOCROOT`` (default: ``/distrib/``). + +.. code-block:: bash + + export DISTRIB=/distrib + export CONFIGURATION_PATH=$DISTRIB/private + mkdir $DISTRIB + mkdir $CONFIGURATION_PATH + cd $DISTRIB + + + + +Clone the git repository +------------------------ + +It is assumed here that you have already created a project and a git repository. See `GitHub <https://docs.github.com>`_ on how to create a new project/repo or a repo from an existing project. + +.. code-block:: bash + + git clone https://github.com/DataverseNO/dataverse-docker.git + cd $DISTRIB/dataverse-docker/ + git checkout dataverse.no + cp -r $DISTRIB/dataverse-docker/secrets $CONFIGURATION_PATH + cp .env_sample .env + az login --identity + az acr login --name presacrd4oilmd5ss77y + docker network create traefik + +Environment variables +--------------------- +If you are using docker-compose, you can skip setting the environment variables manually, as they will be set in the docker-compose.yml file or a .env file. + +We have a pre-configured environment variables file (.env) stored in our resource archive: + +.. code-block:: bash + + cd /tmp + tar -xvzf /resourses.tar.gz + cp /tmp/distrib/private/.env $DISTRIB/dataverse-docker/ + +Go to "Check that your dataverse installation is accessible". + +The following variables (domain name) need to be changed in .env: + +.. code-block:: bash + + hostname=dataverse.azure.com + traefikhost=dataverse.azure.com + +Main configuration + +.. code-block:: bash + + DISTRIB=/distrib + CONFIGURATION_PATH=/distrib/private + +Solr + +.. code-block:: bash + + LOCAL_STORAGE=/mntblob + +Counter Processor + +.. code-block:: bash + + GEOIPLICENSE=licencekey + +Postgres settings + +.. code-block:: bash + + POSTGRES_PASSWORD=password + + +DOI parameters + +.. code-block:: bash + + doi_authority=10.21337 + doi_username=username + doi_password=password + +AWS + +..
code-block:: bash + + + +Certificates installation +------------------------- + +Request the certificates from the correct authority. + +dataverse.no.pem order: + +Local, in file $[hostname].pem + +Intermediate, in file sectigo-intermediate.pem + +Root, in file sectigo-intermediate.pem + +To build the certificate pem file: ``cat sectigo-ecc-intermediate.pem >> *dataverse.no.pem`` + + + +Certificates should be put in ``$CONFIGURATION_PATH/configuration/files``; there are two files, a .pem file and a .key file. + +The names of the certificate files should match the names in ``$CONFIGURATION_PATH/configuration/files/certificates.toml`` + +Check the certificates with ``curl --insecure -vvI https://0.0.0.0:443 2>&1 | awk 'BEGIN { cert=0 } /^\* SSL connection/ { cert=1 } /^\*/ { if (cert) print }'`` + + +DOCROOT +------- + +The appropriate docroot folder needs to be copied to ``$DISTRIB/docroot``, +for example ``rsync -arzvP --rsh=ssh ./docroot [ServerName]:/distrib/docroot`` + + + +Apache and shibboleth configuration +----------------------------------- +Apache configuration + +Change the domain name in ``shibboleth/shibboleth2.xml`` + +Change the domain name twice in ``distros/dataverse.no/configs/http-ssl.conf`` + +Change the domain name twice in ``./distros/dataverse.no/configs/domain.xml`` + +Copy the ``keygen.sh`` command + +Check that your dataverse installation is accessible +---------------------------------------------------- +.. code-block:: bash + + cd $DISTRIB/dataverse-docker/ + docker-compose up -d + +Cronjob to automatically restart dataverse +------------------------------------------ + +NB: remember to stop it if you want it stopped :) + +``*/3 * * * * /bin/bash /root/restart-dataverse.sh https://test-docker.dataverse.no`` + + diff --git a/doc/maintenance.rst b/doc/maintenance.rst new file mode 100644 index 0000000..9237217 --- /dev/null +++ b/doc/maintenance.rst @@ -0,0 +1,196 @@ +Create dump of production database +---------------------------------- + +Connect using SSH to the production VM. + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` + +.. code-block:: bash + + su postgres + pg_dump -U dataverse dataverse > /tmp/dataverse.dump; + +Transmit the dump file to the appropriate VM using rsync: ``rsync -arvzP --rsh=ssh :/tmp/dataverse.dump :/tmp/dataverse.dump `` + +Upload dump of production database +---------------------------------- + +Connect using SSH to the new VM. + + +.. code-block:: bash + + docker cp /tmp/dataverse.dump postgres:/tmp/ + docker stop dataverse + + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` + +.. code-block:: bash + + su postgres + dropdb -U dataverse dataverse; + createdb -U dataverse dataverse; + psql -U dataverse dataverse -f /tmp/dataverse.dump + +Then change the admin password (see below). + + + +Useful database alterations +=========================== + +Replace production DOI with test DOI +------------------------------------ + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` + +.. code-block:: bash + + su postgres + psql -U dataverse dataverse + update dvobject set authority='10.21337' where authority like '%10.18710%'; + +Change Dataverse admin password +------------------------------- + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` + +..
code-block:: bash + + su postgres + psql -U dataverse dataverse + update builtinuser set encryptedpassword= '' where username like '%dataverseAdmin%'; + + +Change the database password +---------------------------- + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` +This needs to be consistent with the password in ``secrets/db/password`` and in ``.env``. + +.. code-block:: bash + + su postgres + psql -U dataverse dataverse + ALTER USER dataverse WITH PASSWORD ''; + + + +Change FEIDE login endpoint +--------------------------- + +If you are using a dockerized version: ``docker exec -it postgres /bin/sh`` + +.. code-block:: bash + + su postgres + psql -U dataverse dataverse + update authenticateduserlookup set persistentuserid=regexp_replace(persistentuserid, 'idp\.', 'idp-test.'); + +Setting up an S3 bucket +----------------------- + +Create an S3 bucket using your preferred provider. + +The asadmin commands described in https://guides.dataverse.org/en/latest/installation/config.html#amazon-s3-storage-or-compatible can be found in `distros/dataverse.no/init.d/006-s3-aws-storage.sh `_. Create one file for every new bucket. + +In ``.env`` (add link) change the following: + +.. code-block:: bash + + aws_uit_bucket_name= + aws_uit_s3_profile= + aws_endpoint_url= + +The endpoint URL can be, for example, ``https\:\/\/s3-oslo.educloud.no``; special characters need to be escaped. If using an AWS-provided bucket the endpoint is not necessary and the region should be set instead in `secrets/aws-cli/.aws/config`_ + +The region and output format should be set in `secrets/aws-cli/.aws/config`_; if using a custom endpoint, the region should be set to a non-existent region. + +.. _secrets/aws-cli/.aws/config: https://github.com/DataverseNO/dataverse-docker/blob/dataverse.no/secrets/aws-cli/.aws/config/ +.. code-block:: bash + + [] + output = json + region = + +The credentials should be set in `secrets/aws-cli/.aws/credentials `_ + +.. code-block:: bash + + [] + aws_access_key_id= + aws_secret_access_key= + +Copy files to and from S3 storage +----------------------------------- + + +Change File storage location +---------------------------- + + +File stored in S3: S3://10.21337/WFD8O0 + +File stored locally +(``select * from dvobject where identifier like '%XCCW4L%';``): file://10.21337/XCCW4L + +The following update statement updates the files while not affecting the external datasets harvested from other locations listed in table 'dataset'. + +.. code-block:: sql + + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + +The following update statement updates the datasets while not affecting the external datasets harvested from other locations listed in table 'dataset'. + +.. code-block:: sql + + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + +In the following example the storage prefix is S3 and the bucket is 2002-green-dataversenotest1: + +..
code-block:: sql + + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + +Example to update for a specific owner: + +.. code-block:: sql + + UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'file://','S3://2002-green-dataversenotest1:') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.owner_id=107543 and o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%file://%'); + +Get MD5 for the files uploaded today +------------------------------------- + +.. code-block:: sql + + select * from dvobject as dv, datafile as df where dv.dtype='DataFile' and modificationtime>='2022-09-20' and dv.id=df.id order by df.id desc limit 10; + + +The MD5 corresponds to the ETag in Cloudian. + + +Delete action logs older than 90 days +------------------------------------- + +Delete action log records older than 90 days: + +.. code-block:: sql + + DELETE FROM actionlogrecord WHERE starttime < current_timestamp - interval '90 days'; + + +To revert storage identifiers from S3 back to local file storage: + +.. code-block:: sql + + dataverse=# UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://2002-green-dataversenotest1:','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'DataFile' AND s.id = o.owner_id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%'); + dataverse=# UPDATE dvobject SET storageidentifier=REPLACE(storageidentifier,'S3://','file://') WHERE id IN (SELECT o.id FROM dvobject o, dataset s WHERE o.dtype = 'Dataset' AND s.id = o.id AND s.harvestingclient_id IS null AND o.storageidentifier LIKE '%S3://%'); + + +Resources +---------- + +SAML packet lookup + +https://addons.mozilla.org/en-US/firefox/addon/saml-tracer/ + + diff --git a/doc/prerequisitsResourses.rst b/doc/prerequisitsResourses.rst new file mode 100644 index 0000000..def6359 --- /dev/null +++ b/doc/prerequisitsResourses.rst @@ -0,0 +1,22 @@ + +In ``/resourse`` + +SSL certificate ``/resourse/private/configuration`` + +Database ``/resourse/private/database-data`` + +Secrets ``/resourse/private/secrets`` + +Shibboleth ``/resourse/private/shibboleth`` + +env ``/resourse/private/.env`` + +Docroot ``/resourse/docroot`` + + +GET ACCESS TO THE DOCKER HUB! + + +.. code-block:: bash + + cp /resourse/* $DISTRIB/ diff --git a/doc/shibbotheth.rst b/doc/shibbotheth.rst new file mode 100644 index 0000000..3643369 --- /dev/null +++ b/doc/shibbotheth.rst @@ -0,0 +1,20 @@ +Shibboleth +========== + +Assuming a working Shibboleth configuration in ``/tmp/shibboleth.tar.gz`` + +Copy and extract the files to the proper location: + +..
code-block:: bash + + export DISTRIB=/distrib + cp /tmp/shibboleth* $DISTRIB/private + cd $DISTRIB/private + tar -xvf shibboleth.tar.gz + cd $DISTRIB/private/shibboleth + +Change domain name ``entityID`` in ``shibboleth2.xml`` to the domain name of the instalation + +Change SMAL service provider location + +you are done (mostely) diff --git a/doc/testing.rst b/doc/testing.rst new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/doc/testing.rst @@ -0,0 +1 @@ + diff --git a/restart-dataverse.sh b/restart-dataverse.sh new file mode 100755 index 0000000..ce94f14 --- /dev/null +++ b/restart-dataverse.sh @@ -0,0 +1,48 @@ +#!/bin/bash +if [ $# -eq 0 ] + then + echo "Usage : restart-dataverse.sh [dataverse adress] [distrib location(optional)]" + exit 1 + fi + +# Check if Dataverse is online +healthcheck="/tmp/healthcheck.log" +restartpid='/tmp/restart.pid' +rm $healthcheck +DATAVERSE=$1 #'https://test-docker.dataverse.no' +DISTRIB=${2:-'/distrib'} +DELAY=15 +echo $DATAVERSE + +curl -s ${DATAVERSE}/api/dataverses/root|grep "name" >> $healthcheck + +cat $healthcheck + +if test "`find $restartpid -mmin +15 -print 2>/dev/null`" + then + echo "${restartpid} is too old, deleting, this may lead to an other restart" + rm $restartpid + fi + + +if [ -s $healthcheck ]; +then + rm $restartpid + echo "Dataverse ${DATAVERSE} is running. " +else + echo "Dataverse ${DATAVERSE} is stopped" + if [ -s $restartpid ]; + then + echo "Dataverse is restarting..." + else + echo 'restarting...' > $restartpid + date >> /mntblob/logs/restart.log + cd ${DISTRIB}/dataverse-docker + echo "down" + /usr/local/bin/docker-compose down + echo "waiting ${DELAY}s for ${DATAVERSE} to go down" + sleep $DELAY + echo "up" + /usr/local/bin/docker-compose up -d + fi +fi diff --git a/secrets/aws-cli/.aws/config b/secrets/aws-cli/.aws/config new file mode 100755 index 0000000..5a6050e --- /dev/null +++ b/secrets/aws-cli/.aws/config @@ -0,0 +1,13 @@ +[cloudian] +output = json +region = + +[uit] +output = json +region = + +[default] +output = json +region = + + diff --git a/secrets/aws-cli/.aws/credentials b/secrets/aws-cli/.aws/credentials new file mode 100755 index 0000000..c1f1cce --- /dev/null +++ b/secrets/aws-cli/.aws/credentials @@ -0,0 +1,7 @@ +[cloudian] +aws_access_key_id= +aws_secret_access_key= + +[uit] +aws_access_key_id= +aws_secret_access_key= diff --git a/secrets/aws-cli/aws-list.sh b/secrets/aws-cli/aws-list.sh new file mode 100755 index 0000000..3afa73c --- /dev/null +++ b/secrets/aws-cli/aws-list.sh @@ -0,0 +1,2 @@ +#!/bin/bash +aws --endpoint-url https:// s3api list-objects-v2 --bucket | clientSecret:", + "enabled":true +} + diff --git a/shibboleth/apache.config b/shibboleth/apache.config new file mode 100644 index 0000000..d692f38 --- /dev/null +++ b/shibboleth/apache.config @@ -0,0 +1,56 @@ +# https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPApacheConfig + +# RPM installations on platforms with a conf.d directory will +# result in this file being copied into that directory for you +# and preserved across upgrades. + +# For non-RPM installs, you should copy the relevant contents of +# this file to a configuration location you control. + +# +# Load the Shibboleth module. +# +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# An Apache handler needs to be established for the "handler" location. +# This applies the handler to any requests for a resource with a ".sso" +# extension. +# + + SetHandler shib-handler + + +# +# Ensures handler will be accessible. 
+# + + Satisfy Any + Allow from all + + +# +# Used for example style sheet in error templates. +# + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + Satisfy Any + Allow from all + + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibCompatWith24 On + ShibRequestSetting requireSession 1 + require shib-session + diff --git a/shibboleth/apache24.config b/shibboleth/apache24.config new file mode 100644 index 0000000..3a0a7b2 --- /dev/null +++ b/shibboleth/apache24.config @@ -0,0 +1,53 @@ +# https://wiki.shibboleth.net/confluence/display/SHIB2/NativeSPApacheConfig + +# RPM installations on platforms with a conf.d directory will +# result in this file being copied into that directory for you +# and preserved across upgrades. + +# For non-RPM installs, you should copy the relevant contents of +# this file to a configuration location you control. + +# +# Load the Shibboleth module. +# +LoadModule mod_shib /usr/lib64/shibboleth/mod_shib_24.so + +# +# Turn this on to support "require valid-user" rules from other +# mod_authn_* modules, and use "require shib-session" for anonymous +# session-based authorization in mod_shib. +# +ShibCompatValidUser Off + +# +# Ensures handler will be accessible. +# + + AuthType None + Require all granted + + +# +# Used for example style sheet in error templates. +# + + + AuthType None + Require all granted + + Alias /shibboleth-sp/main.css /usr/share/shibboleth/main.css + + +# +# Configure the module for content. +# +# You MUST enable AuthType shibboleth for the module to process +# any requests, and there MUST be a require command as well. To +# enable Shibboleth but not specify any session/access requirements +# use "require shibboleth". +# + + AuthType shibboleth + ShibRequestSetting requireSession 1 + require shib-session + diff --git a/shibboleth/attrChecker.html b/shibboleth/attrChecker.html new file mode 100644 index 0000000..a3ddf6e --- /dev/null +++ b/shibboleth/attrChecker.html @@ -0,0 +1,57 @@ + + + + + + + + Insufficient Information + + + + + +Logo + +

We're sorry, but you cannot access this service at this time.

+ + +

This service requires information about you that your identity provider +() +did not release. To gain access to this service, your identity provider +must release the required information.

+ + +

+

+Please visit + +the support page +this support page + +for further instructions. +
+

+
+
+ + +

Your session was already invalidated before your information could + be examined for completeness.

+
+ +

+You were trying to access the following URL: +

+

+ + +

For more information about this service, including what user information is +required for access, please visit our +information page.

+
+ + + diff --git a/shibboleth/attribute-map.xml b/shibboleth/attribute-map.xml new file mode 100755 index 0000000..c8a969f --- /dev/null +++ b/shibboleth/attribute-map.xml @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/shibboleth/attribute-policy.xml b/shibboleth/attribute-policy.xml new file mode 100755 index 0000000..74e8777 --- /dev/null +++ b/shibboleth/attribute-policy.xml @@ -0,0 +1,81 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/shibboleth/bindingTemplate.html b/shibboleth/bindingTemplate.html new file mode 100644 index 0000000..59a924b --- /dev/null +++ b/shibboleth/bindingTemplate.html @@ -0,0 +1,58 @@ + + + Shibboleth Authentication Request + + + +

Shibboleth Authentication Request

+ + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + +
+ + diff --git a/shibboleth/console.logger b/shibboleth/console.logger new file mode 100644 index 0000000..dedb731 --- /dev/null +++ b/shibboleth/console.logger @@ -0,0 +1,33 @@ +log4j.rootCategory=WARN, console + +# fairly verbose for DEBUG, so generally leave at INFO +log4j.category.XMLTooling.XMLObject=INFO +log4j.category.XMLTooling.XMLObjectBuilder=INFO +log4j.category.XMLTooling.KeyInfoResolver=INFO +log4j.category.Shibboleth.IPRange=INFO +log4j.category.Shibboleth.PropertySet=INFO + +# raise for low-level tracing of SOAP client HTTP/SSL behavior +log4j.category.XMLTooling.libcurl=INFO + +# useful categories to tune independently: +# +# tracing of SAML messages and security policies +#log4j.category.OpenSAML.MessageDecoder=DEBUG +#log4j.category.OpenSAML.MessageEncoder=DEBUG +#log4j.category.OpenSAML.SecurityPolicyRule=DEBUG +# interprocess message remoting +#log4j.category.Shibboleth.Listener=DEBUG +# mapping of requests to applicationId +#log4j.category.Shibboleth.RequestMapper=DEBUG +# high level session cache operations +#log4j.category.Shibboleth.SessionCache=DEBUG +# persistent storage and caching +#log4j.category.XMLTooling.StorageService=DEBUG + +# define the appender + +log4j.appender.console=org.apache.log4j.ConsoleAppender +#log4j.appender.console.layout=org.apache.log4j.BasicLayout +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S} %p %c %x: %m%n diff --git a/shibboleth/discoveryTemplate.html b/shibboleth/discoveryTemplate.html new file mode 100644 index 0000000..244e1f5 --- /dev/null +++ b/shibboleth/discoveryTemplate.html @@ -0,0 +1,48 @@ + + + Request for Authentication + + +

Request for Authentication

+ +

This web site requires you to login before proceeding. Please identify + the domain name of your organization:

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +

The system was unable to determine how to proceed using the value you supplied.

+
+ + diff --git a/shibboleth/globalLogout.html b/shibboleth/globalLogout.html new file mode 100644 index 0000000..86f2050 --- /dev/null +++ b/shibboleth/globalLogout.html @@ -0,0 +1,29 @@ + + + + + + + + Global Logout + + + + + +Logo + +

Global Logout

+ +

Status of Global Logout:

+ +

If the message above indicates success, you have been logged out of all +the applications and systems that support the logout mechanism.

+ +

Regardless of the outcome, it is strongly advised that you close your browser +to ensure that you complete the logout process.

+ + + diff --git a/shibboleth/keygen.sh b/shibboleth/keygen.sh new file mode 100755 index 0000000..b5378fd --- /dev/null +++ b/shibboleth/keygen.sh @@ -0,0 +1,91 @@ +#! /bin/sh + +while getopts n:h:u:g:o:e:y:bf c + do + case $c in + u) USER=$OPTARG;; + g) GROUP=$OPTARG;; + o) OUT=$OPTARG;; + b) BATCH=1;; + f) FORCE=1;; + h) FQDN=$OPTARG;; + e) ENTITYID=$OPTARG;; + y) YEARS=$OPTARG;; + n) PREFIX=$OPTARG;; + \?) echo "keygen [-o output directory (default .)] [-u username to own keypair] [-g owning groupname] [-h hostname for cert] [-y years to issue cert] [-e entityID to embed in cert] [-n filename prefix (default 'sp')]" + exit 1;; + esac + done + +if [ -z "$OUT" ] ; then + OUT=. +fi + +if [ -z "$PREFIX" ]; then + PREFIX="sp" +fi + +if [ -n "$FORCE" ] ; then + rm $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem +fi + +if [ -s $OUT/${PREFIX}-key.pem -o -s $OUT/${PREFIX}-cert.pem ] ; then + if [ -z "$BATCH" ] ; then + echo The files $OUT/${PREFIX}-key.pem and/or $OUT/${PREFIX}-cert.pem already exist! + echo Use -f option to force recreation of keypair. + exit 2 + fi + exit 0 +fi + +if [ -z "$FQDN" ] ; then + FQDN=`hostname` +fi + +if [ -z "$YEARS" ] ; then + YEARS=10 +fi + +DAYS=`expr $YEARS \* 365` + +if [ -z "$ENTITYID" ] ; then + ALTNAME=DNS:$FQDN +else + ALTNAME=DNS:$FQDN,URI:$ENTITYID +fi + +SSLCNF=$OUT/${PREFIX}-cert.cnf +cat >$SSLCNF < /dev/null +fi +rm $SSLCNF + +if [ -s $OUT/${PREFIX}-key.pem -a -n "$USER" ] ; then + chown $USER $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem +fi + +if [ -s $OUT/${PREFIX}-key.pem -a -n "$GROUP" ] ; then + chgrp $GROUP $OUT/${PREFIX}-key.pem $OUT/${PREFIX}-cert.pem +fi diff --git a/shibboleth/localLogout.html b/shibboleth/localLogout.html new file mode 100644 index 0000000..75bd3e1 --- /dev/null +++ b/shibboleth/localLogout.html @@ -0,0 +1,27 @@ + + + + + + + + Local Logout + + + + + +Logo + +

Local Logout

+ +Status of Local Logout: + +

+ +You MUST close your browser to complete the logout process. + + + diff --git a/shibboleth/metadataError.html b/shibboleth/metadataError.html new file mode 100644 index 0000000..e0e6a1b --- /dev/null +++ b/shibboleth/metadataError.html @@ -0,0 +1,35 @@ + + + + + + + + Unknown Identity Provider + + + + + +Logo + +

Unknown or Unusable Identity Provider

+ +

The identity provider supplying your login credentials is not authorized +for use with this service or does not support the necessary capabilities.

+ +

To report this problem, please contact the site administrator at +. +

+ +

Please include the following error message in any email:

+

Identity provider lookup failed at ()

+ +

EntityID:

+
+

:

+ + + diff --git a/shibboleth/metagen.sh b/shibboleth/metagen.sh new file mode 100755 index 0000000..f39d53b --- /dev/null +++ b/shibboleth/metagen.sh @@ -0,0 +1,439 @@ +#!/usr/bin/bash + +DECLS=1 + +TYPE="SHIB" + +SAML1=0 +SAML2=0 +ARTIFACT=0 +DS=0 +MDUI=0 +LOGOUT=0 +NAMEIDMGMT=0 + +SAML10PROT="urn:oasis:names:tc:SAML:1.0:protocol" +SAML11PROT="urn:oasis:names:tc:SAML:1.1:protocol" +SAML20PROT="urn:oasis:names:tc:SAML:2.0:protocol" + +SAML20SOAP="urn:oasis:names:tc:SAML:2.0:bindings:SOAP" +SAML20REDIRECT="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" +SAML20POST="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" +SAML20POSTSS="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST-SimpleSign" +SAML20ART="urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact" +SAML20PAOS="urn:oasis:names:tc:SAML:2.0:bindings:PAOS" + +SAML1POST="urn:oasis:names:tc:SAML:1.0:profiles:browser-post" +SAML1ART="urn:oasis:names:tc:SAML:1.0:profiles:artifact-01" + +while getopts a:c:e:f:h:l:n:o:s:t:u:y:d:T:12ADLNOU c + do + case $c in + c) CERTS[${#CERTS[*]}]=$OPTARG;; + e) ENTITYID=$OPTARG;; + f) FORMATS[${#FORMATS[*]}]=$OPTARG;; + h) HOSTS[${#HOSTS[*]}]=$OPTARG;; + l) HOSTLIST=$OPTARG;; + n) NAKEDHOSTS[${#NAKEDHOSTS[*]}]=$OPTARG;; + o) ORGNAME=$OPTARG;; + a) ADMIN[${#ADMIN[*]}]=$OPTARG;; + s) SUP[${#SUP[*]}]=$OPTARG;; + t) TECH[${#TECH[*]}]=$OPTARG;; + u) URL=$OPTARG;; + y) DISPLAYNAME=$OPTARG;; + d) DESC=$OPTARG;; + 1) SAML1=1;; + 2) SAML2=1;; + A) ARTIFACT=1;; + D) DS=1;; + L) LOGOUT=1;; + N) NAMEIDMGMT=1;; + O) DECLS=0;; + T) TYPE=$OPTARG;; + U) MDUI=1;; + \?) echo metagen [-12ADLNOU] -c cert1 [-c cert2 ...] -h host1 [-h host2 ...] [-e entityID] + exit 1;; + esac + done + +if [ ! -z $HOSTLIST ] ; then + if [ -s $HOSTLIST ] ; then + while read h + do + HOSTS[${#HOSTS[@]}]=$h + done <$HOSTLIST + else + echo File with list of hostnames $l does not exist! + exit 2 + fi +fi + +if [ ${#HOSTS[*]} -eq 0 -a ${#NAKEDHOSTS[*]} -eq 0 ] ; then + echo metagen [-12ADLN] -c cert1 [-c cert2 ...] -h host1 [-h host2 ...] [-e entityID] + exit 1 +fi + +if [ ${#CERTS[*]} -eq 0 ] ; then + CERTS[${#CERTS[*]}]=sp-cert.pem +fi + +for c in ${CERTS[@]} +do + if [ ! -s $c ] ; then + echo Certificate file $c does not exist! + exit 2 + fi +done + +if [ $TYPE == "SHIB" ] ; then + EIDSUFFIX=shibboleth +elif [ $TYPE == "SSP" ] ; then + EIDSUFFIX=simplesaml +else + echo "Unknown type: $TYPE \(SHIB and SSP are supported\)" + exit 3 +fi + + +if [ -z $ENTITYID ] ; then + if [ ${#HOSTS[*]} -eq 0 ] ; then + ENTITYID=https://${NAKEDHOSTS[0]}/$EIDSUFFIX + else + ENTITYID=https://${HOSTS[0]}/$EIDSUFFIX + fi +fi + +# Establish protocols and bindings. 
+ +if [ $SAML1 -eq 0 -a $SAML2 -eq 0 ] ; then + SAML1=1 + SAML2=1 +fi + +if [ $LOGOUT -eq 1 ] ; then + SAML2=1 + if [ $TYPE == "SHIB" ] ; then + SLO[${#SLO[*]}]=$SAML20SOAP + SLO[${#SLO[*]}]=$SAML20REDIRECT + SLO[${#SLO[*]}]=$SAML20POST + SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/SOAP" + SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/Redirect" + SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/POST" + elif [ $TYPE == "SSP" ] ; then + SLO[${#SLO[*]}]=$SAML20SOAP + SLO[${#SLO[*]}]=$SAML20REDIRECT + SLOLOC[${#SLOLOC[*]}]="simplesaml/module.php/saml/sp/saml2-logout.php/default-sp" + SLOLOC[${#SLOLOC[*]}]="simplesaml/module.php/saml/sp/saml2-logout.php/default-sp" + fi + if [ $ARTIFACT -eq 1 -a $TYPE == "SHIB" ] ; then + SLO[${#SLO[*]}]=$SAML20ART + SLOLOC[${#SLOLOC[*]}]="Shibboleth.sso/SLO/Artifact" + fi +fi + +if [ $NAMEIDMGMT -eq 1 -a $TYPE == "SHIB" ] ; then + SAML2=1 + NIM[${#NIM[*]}]=$SAML20SOAP + NIM[${#NIM[*]}]=$SAML20REDIRECT + NIM[${#NIM[*]}]=$SAML20POST + NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/SOAP" + NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/Redirect" + NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/POST" + if [ $ARTIFACT -eq 1 -a $TYPE == "SHIB" ] ; then + NIM[${#NIM[*]}]=$SAML20ART + NIMLOC[${#NIMLOC[*]}]="Shibboleth.sso/NIM/Artifact" + fi +fi + +if [ $SAML1 -eq 1 -a $SAML2 -eq 1 ] ; then + PROTENUM="$SAML20PROT $SAML11PROT" +elif [ $SAML1 -eq 1 ] ; then + PROTENUM="$SAML11PROT" +else + PROTENUM="$SAML20PROT" +fi + +if [ $SAML2 -eq 1 ] ; then + if [ $TYPE == "SHIB" ] ; then + ACS[${#ACS[*]}]=$SAML20POST + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/POST" + ACS[${#ACS[*]}]=$SAML20POSTSS + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/POST-SimpleSign" + if [ $ARTIFACT -eq 1 ] ; then + ACS[${#ACS[*]}]=$SAML20ART + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/Artifact" + fi + ACS[${#ACS[*]}]=$SAML20PAOS + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML2/ECP" + elif [ $TYPE == "SSP" ] ; then + ACS[${#ACS[*]}]=$SAML20POST + ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml2-acs.php/default-sp" + if [ $ARTIFACT -eq 1 ] ; then + ACS[${#ACS[*]}]=$SAML20ART + ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml2-acs.php/default-sp" + fi + fi +fi + +if [ $SAML1 -eq 1 ] ; then + if [ $TYPE == "SHIB" ] ; then + ACS[${#ACS[*]}]=$SAML1POST + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML/POST" + if [ $ARTIFACT -eq 1 ] ; then + ACS[${#ACS[*]}]=$SAML1ART + ACSLOC[${#ACSLOC[*]}]="Shibboleth.sso/SAML/Artifact" + fi + elif [ $TYPE == "SSP" ] ; then + ACS[${#ACS[*]}]=$SAML1POST + ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml1-acs.php/default-sp" + if [ $ARTIFACT -eq 1 ] ; then + ACS[${#ACS[*]}]=$SAML1ART + ACSLOC[${#ACSLOC[*]}]="simplesaml/module.php/saml/sp/saml1-acs.php/default-sp/artifact" + fi + fi +fi + +if [ $DECLS -eq 1 ] ; then + DECLS="xmlns:md=\"urn:oasis:names:tc:SAML:2.0:metadata\" xmlns:ds=\"http://www.w3.org/2000/09/xmldsig#\" " + if [ $DS -eq 1 ] ; then + DECLS="${DECLS}xmlns:disco=\"urn:oasis:names:tc:SAML:profiles:SSO:idp-discovery-protocol\" " + fi + if [ $MDUI -eq 1 ] ; then + DECLS="${DECLS}xmlns:mdui=\"urn:oasis:names:tc:SAML:metadata:ui\" " + fi +else + DECLS="" +fi + +cat < + +EOF + +# Discovery BEGIN +if [ $DS -eq 1 -a $TYPE == "SHIB" -o $MDUI -eq 1 ] ; then + +cat << EOF + +EOF + +if [ $MDUI -eq 1 ] ; then + cat << EOF + +EOF + + if [ -n "$DISPLAYNAME" ] ; then + cat << EOF + $DISPLAYNAME +EOF + fi + + if [ -n "$DESC" ] ; then + cat << EOF + $DESC +EOF + fi + + cat << EOF + +EOF +fi + +if [ $DS -eq 1 -a $TYPE == "SHIB" ] ; then + count=1 + for h in ${HOSTS[@]} + do + cat << EOF 
+ +EOF + let "count++" + done + + for h in ${NAKEDHOSTS[@]} + do + cat << EOF + +EOF + let "count++" + done +fi + +cat << EOF + +EOF + +fi +# Discovery END + +for c in ${CERTS[@]} +do +cat << EOF + + + + +EOF +grep -v ^- $c +cat << EOF + + + + +EOF +done + +# Logout BEGIN +if [ $LOGOUT -eq 1 ] ; then + +for h in ${HOSTS[@]} +do + count=0 + while [ $count -lt ${#SLO[*]} ] + do + cat < +EOF + let "count++" + done +done + +for h in ${NAKEDHOSTS[@]} +do + count=0 + while [ $count -lt ${#SLO[*]} ] + do + cat < +EOF + let "count++" + done +done + +fi +# Logout END + +# NameID Mgmt BEGIN +if [ $NAMEIDMGMT -eq 1 -a $TYPE == "SHIB" ] ; then + +for h in ${HOSTS[@]} +do + count=0 + while [ $count -lt ${#NIM[*]} ] + do + cat < +EOF + let "count++" + done +done + +for h in ${NAKEDHOSTS[@]} +do + count=0 + while [ $count -lt ${#NIM[*]} ] + do + cat < +EOF + let "count++" + done +done + +fi +# NameID Mgmt END + +for f in ${FORMATS[@]} +do +cat << EOF + $f +EOF +done + +index=0 +for h in ${HOSTS[@]} +do + count=0 + while [ $count -lt ${#ACS[*]} ] + do + cat < +EOF + let "count++" + let "index++" + done +done + +for h in ${NAKEDHOSTS[@]} +do + count=0 + while [ $count -lt ${#ACS[*]} ] + do + cat < +EOF + let "count++" + let "index++" + done +done + +cat < +EOF + +if [ -n "$ORGNAME" ] ; then + if [ -z "$URL" ] ; then + URL=$ENTITYID + fi + cat < + $ORGNAME + $ORGNAME + $URL + +EOF +fi + +count=${#ADMIN[*]} +for (( i=0; i + ${c[0]} + ${c[1]} + ${c[2]} + +EOF +done + +count=${#SUP[*]} +for (( i=0; i + ${c[0]} + ${c[1]} + ${c[2]} + +EOF +done + +count=${#TECH[*]} +for (( i=0; i + ${c[0]} + ${c[1]} + ${c[2]} + +EOF +done + +cat < + +EOF + diff --git a/shibboleth/native.logger b/shibboleth/native.logger new file mode 100644 index 0000000..e9a43a5 --- /dev/null +++ b/shibboleth/native.logger @@ -0,0 +1,30 @@ +# set overall behavior +log4j.rootCategory=WARN, native_log + +# fairly verbose for DEBUG, so generally leave at WARN/INFO +log4j.category.XMLTooling.XMLObject=WARN +log4j.category.XMLTooling.XMLObjectBuilder=WARN +log4j.category.XMLTooling.KeyInfoResolver=WARN +log4j.category.Shibboleth.IPRange=WARN +log4j.category.Shibboleth.PropertySet=WARN + +# useful categories to tune independently: +# +# interprocess message remoting +#log4j.category.Shibboleth.Listener=DEBUG +# mapping of requests to applicationId +#log4j.category.Shibboleth.RequestMapper=DEBUG +# high level session cache operations +#log4j.category.Shibboleth.SessionCache=DEBUG + +# define the appender + +# Change to SyslogAppender for remote syslog, and set host/port +log4j.appender.native_log=org.apache.log4j.LocalSyslogAppender +#log4j.appender.native_log.syslogHost=localhost +#log4j.appender.native_log.portNumber=514 +log4j.appender.native_log.syslogName=shibboleth +# Facility is numeric, 16 is LOCAL0 +log4j.appender.native_log.facility=16 +log4j.appender.native_log.layout=org.apache.log4j.PatternLayout +log4j.appender.native_log.layout.ConversionPattern=%p %c %x: %m%n diff --git a/shibboleth/partialLogout.html b/shibboleth/partialLogout.html new file mode 100644 index 0000000..fe24a7c --- /dev/null +++ b/shibboleth/partialLogout.html @@ -0,0 +1,24 @@ + + + + + + + + Partial Logout + + + + + +Logo + +

Partial Logout

+ +

You remain logged into one or more applications accessed during your session. +To complete the logout process, please close/exit your browser completely.

+ + + diff --git a/shibboleth/postTemplate.html b/shibboleth/postTemplate.html new file mode 100644 index 0000000..d8c4728 --- /dev/null +++ b/shibboleth/postTemplate.html @@ -0,0 +1,37 @@ + + + Login Completed + + + +

Login Completed

+ + + +
+ + + + +
+ + diff --git a/shibboleth/protocols.xml b/shibboleth/protocols.xml new file mode 100644 index 0000000..648bcbc --- /dev/null +++ b/shibboleth/protocols.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/shibboleth/seckeygen.sh b/shibboleth/seckeygen.sh new file mode 100755 index 0000000..cb8df4d --- /dev/null +++ b/shibboleth/seckeygen.sh @@ -0,0 +1,56 @@ +#! /bin/sh + +while getopts h:u:g:o:f:b: c + do + case $c in + u) USER=$OPTARG;; + g) GROUP=$OPTARG;; + o) OUT=$OPTARG;; + b) BITS=$OPTARG;; + f) FILENAME=$OPTARG;; + h) HISTORY=$OPTARG;; + \?) echo "seckeygen [-o output directory (default .)] [-f filename (default sealer.keys)] [-h key history (default 14)] [-b key size in bits (default 128)] [-u owning username] [-g owning groupname]" + exit 1;; + esac + done + +if [ -z "$OUT" ] ; then + OUT=. +fi + +if [ -z "$FILENAME" ]; then + FILENAME="sealer.keys" +fi + +if [ -z "$HISTORY" ] ; then + HISTORY=14 +fi + +if [ -z "$BITS" ] ; then + BITS=128 +fi + +BYTES=`expr $BITS / 8` + +rm -f $OUT/${FILENAME}.tmp +touch $OUT/${FILENAME}.tmp +chmod 600 $OUT/${FILENAME}.tmp + +KEYVER=1 +if [ -f $OUT/${FILENAME} -a $HISTORY -gt 0 ] ; then + tail -n `expr $HISTORY - 1` $OUT/${FILENAME} > $OUT/${FILENAME}.tmp + KEYVER=`tail -n 1 $OUT/${FILENAME}.tmp | awk -F: '{print $1}'` + KEYVER=`expr $KEYVER + 1` +fi +KEYVAL=`openssl rand -base64 $BYTES 2> /dev/null` +echo "${KEYVER}:${KEYVAL}" >> $OUT/${FILENAME}.tmp + +mv $OUT/${FILENAME}.tmp $OUT/${FILENAME} + +if [ -s $OUT/${FILENAME} -a -n "$USER" ] ; then + chown $USER $OUT/${FILENAME} +fi + +if [ -s $OUT/${FILENAME} -a -n "$GROUP" ] ; then + chgrp $GROUP $OUT/${FILENAME} +fi diff --git a/shibboleth/security-policy.xml b/shibboleth/security-policy.xml new file mode 100644 index 0000000..f8eaacd --- /dev/null +++ b/shibboleth/security-policy.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/shibboleth/sessionError.html b/shibboleth/sessionError.html new file mode 100644 index 0000000..7ccf17b --- /dev/null +++ b/shibboleth/sessionError.html @@ -0,0 +1,45 @@ + + + + + + + + <shibmlp errorType/> + + + + + +Logo + +

+ +

The system encountered an error at

+ +

To report this problem, please contact the site administrator at +. +

+ +

Please include the following message in any email:

+

at ()

+ +

+ + +

Error from identity provider:

+
+ Status:
+ + Sub-Status:
+
+ + Message:
+
+
+
+ + + diff --git a/shibboleth/shibboleth2.xml b/shibboleth/shibboleth2.xml new file mode 100755 index 0000000..691a188 --- /dev/null +++ b/shibboleth/shibboleth2.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + SAML2 SAML1 + + + + SAML2 Local + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/shibboleth/shibd-amazon b/shibboleth/shibd-amazon new file mode 100644 index 0000000..5275390 --- /dev/null +++ b/shibboleth/shibd-amazon @@ -0,0 +1,133 @@ +#!/bin/bash +# +# shibd Shibboleth Service Provider Daemon +# +# chkconfig: - 80 20 +# description: Shibboleth 2 Service Provider Daemon +# processname: shibd +# pidfile: /var/run/shibboleth/shibd.pid +# config: /etc/shibboleth/shibboleth2.xml + +### BEGIN INIT INFO +# Provides: shibd +# Required-Start: $local_fs $remote_fs $network +# Should-Start: $time +# Should-Stop: $time +# Required-Stop: $local_fs $remote_fs $network +# Default-Start: 3 5 +# Default-Stop: 0 1 2 6 +# Short-Description: Shibboleth 2 Service Provider Daemon +# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions. +### END INIT INFO + +# Source function library. +. /etc/rc.d/init.d/functions + +shibd="/usr/sbin/shibd" +SHIBD_USER=root +SHIBD_UMASK=022 +SHIBD_WAIT=30 +prog=shibd +pidfile=/var/run/shibboleth/shibd.pid +lockfile=/var/lock/subsys/$prog + +[ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog + +umask $SHIBD_UMASK + +start() { + echo -n $"Starting $prog: " + if [ -f $lockfile ] ; then + if [ -f $pidfile ]; then + read kpid < $pidfile + if checkpid $kpid 2>&1; then + echo "process already running" + return 1; + else + echo "lock file found but no process running for pid $kpid, continuing" + fi + fi + fi + + # Make sure package run directory exists. + [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth + + export SHIBD_PID=$pidfile + touch $pidfile + chown $SHIBD_USER:$SHIBD_USER $pidfile + + # Handle transition from root to non-root packages. + chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || : + daemon --user $SHIBD_USER $shibd -p $pidfile -f -w $SHIBD_WAIT + + RETVAL=$? + echo + [ $RETVAL -eq 0 ] && touch $lockfile + return $RETVAL +} + +stop() { + echo -n $"Stopping $prog: " + killproc shibd + + RETVAL=$? + echo + [ $RETVAL -eq 0 ] && rm -f $lockfile $pidfile + return $RETVAL +} + +restart() { + stop + sleep 5 + start +} + +reload() { + restart +} + +force_reload() { + restart +} + +rh_status() { + # run checks to determine if the service is running or use generic status + status $prog +} + +rh_status_q() { + rh_status >/dev/null 2>&1 +} + +case "$1" in + start) + rh_status_q && exit 0 + $1 + ;; + stop) + rh_status_q || exit 0 + $1 + ;; + restart) + $1 + ;; + reload) + rh_status_q || exit 7 + $1 + ;; + force-reload) + force_reload + ;; + status) + rh_status + ;; + condrestart|try-restart) + rh_status_q || exit 0 + restart + ;; + *) + echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}" + exit 2 +esac + +exit $? diff --git a/shibboleth/shibd-debian b/shibboleth/shibd-debian new file mode 100644 index 0000000..f9860d0 --- /dev/null +++ b/shibboleth/shibd-debian @@ -0,0 +1,168 @@ +#! 
/bin/sh +### BEGIN INIT INFO +# Provides: shibd +# Required-Start: $local_fs $remote_fs $network +# Required-Stop: $local_fs $remote_fs +# Default-Start: 2 3 4 5 +# Default-Stop: +# Short-Description: Shibboleth 3 Service Provider Daemon +# Description: Starts the separate daemon used by the Shibboleth +# Apache module to manage sessions and to retrieve +# attributes from Shibboleth Identity Providers. +### END INIT INFO +# +# Written by Quanah Gibson-Mount +# Modified by Lukas Haemmerle for Shibboleth 2 +# Updated to use the LSB init functions by Russ Allbery +# +# Based on the dh-make template written by: +# +# Written by Miquel van Smoorenburg . +# Modified for Debian +# by Ian Murdock . + +PATH=/sbin:/bin:/usr/sbin:/usr/bin +DESC="Shibboleth 3 daemon" +NAME=shibd +SHIB_HOME=/usr +SHIBSP_CONFIG=/etc/shibboleth/shibboleth2.xml +SHIBD_WAIT=30 +LD_LIBRARY_PATH=/usr/lib +DAEMON=/usr/sbin/$NAME +SCRIPTNAME=/etc/init.d/$NAME +PIDFILE=/var/run/shibboleth/$NAME.pid +DAEMON_OPTS="" +DAEMON_USER=_shibd + +# Read configuration if it is present. +[ -r /etc/default/$NAME ] && . /etc/default/$NAME + +# Force removal of socket +DAEMON_OPTS="$DAEMON_OPTS -f" + +# Use defined configuration file +DAEMON_OPTS="$DAEMON_OPTS -c $SHIBSP_CONFIG" + +# Specify pid file to use +DAEMON_OPTS="$DAEMON_OPTS -p $PIDFILE" + +# Specify wait time to use +DAEMON_OPTS="$DAEMON_OPTS -w $SHIBD_WAIT" + +# Exit if the package is not installed. +[ -x "$DAEMON" ] || exit 0 + +# Load the VERBOSE setting and other rcS variables +. /lib/init/vars.sh + +# Define LSB log_* functions. +. /lib/lsb/init-functions + +prepare_environment () { + # Ensure /var/run/shibboleth exists. /var/run may be on a tmpfs file system. + [ -d '/var/run/shibboleth' ] || mkdir -p '/var/run/shibboleth' + + # If $DAEMON_USER is set, try to run shibd as that user. However, + # versions of the Debian package prior to 2.3+dfsg-1 ran shibd as root, + # and the local administrator may not have made the server's private key + # readable by $DAEMON_USER. We therefore test first by running shibd -t + # and looking for the error code indicating that the private key could not + # be read. If we get that error, we fall back on running shibd as root. + if [ -n "$DAEMON_USER" ]; then + DIAG=$(su -s $DAEMON $DAEMON_USER -- -t $DAEMON_OPTS 2>/dev/null) + if [ $? = 0 ] ; then + # openssl errstr 200100D (hex for 33558541) says: + # error:0200100D:system library:fopen:Permission denied + ERROR='ERROR OpenSSL : error code: 33558541 ' + if echo "$DIAG" | fgrep -q "$ERROR" ; then + unset DAEMON_USER + log_warning_msg "$NAME: file permissions require running as" \ + "root" + else + chown -Rh "$DAEMON_USER" '/var/run/shibboleth' '/var/log/shibboleth' + fi + else + unset DAEMON_USER + log_warning_msg "$NAME: unable to run config check as user" \ + "$DAEMON_USER" + fi + unset DIAG + fi +} + +# Start shibd. +do_start () { + # Return + # 0 if daemon has been started + # 1 if daemon was already running + # 2 if daemon could not be started + start-stop-daemon --start --quiet ${DAEMON_USER:+--chuid $DAEMON_USER} \ + --pidfile $PIDFILE --exec $DAEMON --test > /dev/null \ + || return 1 + start-stop-daemon --start --quiet ${DAEMON_USER:+--chuid $DAEMON_USER} \ + --pidfile $PIDFILE --exec $DAEMON -- $DAEMON_OPTS \ + || return 2 +} + +# Stop shibd. 
+do_stop () { + # Return + # 0 if daemon has been stopped + # 1 if daemon was already stopped + # 2 if daemon could not be stopped + # other if a failure occurred + start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 \ + --pidfile $PIDFILE --name $NAME + RETVAL="$?" + return "$RETVAL" +} + +case "$1" in +start) + prepare_environment + + [ "$VERBOSE" != no ] && log_daemon_msg "Starting $DESC" "$NAME" + do_start + case "$?" in + 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; + 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; + esac + ;; +stop) + [ "$VERBOSE" != no ] && log_daemon_msg "Stopping $DESC" "$NAME" + do_stop + case "$?" in + 0|1) [ "$VERBOSE" != no ] && log_end_msg 0 ;; + 2) [ "$VERBOSE" != no ] && log_end_msg 1 ;; + esac + ;; +restart|force-reload) + prepare_environment + + log_daemon_msg "Restarting $DESC" "$NAME" + do_stop + case "$?" in + 0|1) + do_start + case "$?" in + 0) log_end_msg 0 ;; + 1) log_end_msg 1 ;; # Old process is still running + *) log_end_msg 1 ;; # Failed to start + esac + ;; + *) + # Failed to stop + log_end_msg 1 + ;; + esac + ;; +status) + status_of_proc -p "$PIDFILE" "$DAEMON" "$NAME" && exit 0 || exit $? + ;; +*) + echo "Usage: $SCRIPTNAME {start|stop|restart|force-reload|status}" >&2 + exit 1 + ;; +esac + +exit 0 diff --git a/shibboleth/shibd-osx.plist b/shibboleth/shibd-osx.plist new file mode 100644 index 0000000..795c312 --- /dev/null +++ b/shibboleth/shibd-osx.plist @@ -0,0 +1,23 @@ + + + + + Label net.shibboleth.sp.shibd + ServiceDescription Shibboleth 3 Service Provider daemon + + ProgramArguments + + /usr/sbin/shibd + -F + -f + -p + /var/run/shibboleth/shibd.pid + + + RunAtLoad + OnDemand + StandardErrorPath /dev/null + UserName root + Umask 0022 + + diff --git a/shibboleth/shibd-redhat b/shibboleth/shibd-redhat new file mode 100755 index 0000000..f53a954 --- /dev/null +++ b/shibboleth/shibd-redhat @@ -0,0 +1,133 @@ +#!/bin/bash +# +# shibd Shibboleth Service Provider Daemon +# +# chkconfig: - 80 20 +# description: Shibboleth 3 Service Provider Daemon +# processname: shibd +# pidfile: /var/run/shibboleth/shibd.pid +# config: /etc/shibboleth/shibboleth2.xml + +### BEGIN INIT INFO +# Provides: shibd +# Required-Start: $local_fs $remote_fs $network +# Should-Start: $time +# Should-Stop: $time +# Required-Stop: $local_fs $remote_fs $network +# Default-Start: 3 5 +# Default-Stop: 0 1 2 6 +# Short-Description: Shibboleth 3 Service Provider Daemon +# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions. +### END INIT INFO + +# Source function library. +. /etc/rc.d/init.d/functions + +shibd="/usr/sbin/shibd" +SHIBD_USER=shibd +SHIBD_UMASK=022 +SHIBD_WAIT=30 +prog=shibd +pidfile=/var/run/shibboleth/shibd.pid +lockfile=/var/lock/subsys/$prog + +[ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog + +umask $SHIBD_UMASK + +start() { + echo -n $"Starting $prog: " + if [ -f $lockfile ] ; then + if [ -f $pidfile ]; then + read kpid < $pidfile + if checkpid $kpid 2>&1; then + echo "process already running" + return 1; + else + echo "lock file found but no process running for pid $kpid, continuing" + fi + fi + fi + + # Make sure package run directory exists. + [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth + + export SHIBD_PID=$pidfile + touch $pidfile + chown $SHIBD_USER:$SHIBD_USER $pidfile + + # Handle transition from root to non-root packages. 
+ chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || : + daemon --user $SHIBD_USER $shibd -p $pidfile -f -w $SHIBD_WAIT + + RETVAL=$? + echo + [ $RETVAL -eq 0 ] && touch $lockfile + return $RETVAL +} + +stop() { + echo -n $"Stopping $prog: " + killproc shibd + + RETVAL=$? + echo + [ $RETVAL -eq 0 ] && rm -f $lockfile $pidfile + return $RETVAL +} + +restart() { + stop + sleep 5 + start +} + +reload() { + restart +} + +force_reload() { + restart +} + +rh_status() { + # run checks to determine if the service is running or use generic status + status $prog +} + +rh_status_q() { + rh_status >/dev/null 2>&1 +} + +case "$1" in + start) + rh_status_q && exit 0 + $1 + ;; + stop) + rh_status_q || exit 0 + $1 + ;; + restart) + $1 + ;; + reload) + rh_status_q || exit 7 + $1 + ;; + force-reload) + force_reload + ;; + status) + rh_status + ;; + condrestart|try-restart) + rh_status_q || exit 0 + restart + ;; + *) + echo $"Usage: $0 {start|stop|status|restart|condrestart|try-restart|reload|force-reload}" + exit 2 +esac + +exit $? diff --git a/shibboleth/shibd-suse b/shibboleth/shibd-suse new file mode 100644 index 0000000..d90dfab --- /dev/null +++ b/shibboleth/shibd-suse @@ -0,0 +1,130 @@ +#! /bin/sh +# Author: Peter Schober and many others +# based on shibd-debian (from Shibboleth's 1.3.1 SP source distribution) +# and SUSE's /etc/init.d/cyrus +# +# /etc/init.d/shibd +# +### BEGIN INIT INFO +# Provides: shibd +# Required-Start: $local_fs $remote_fs $network +# Should-Start: $time +# Should-Stop: $time +# Required-Stop: $local_fs $remote_fs $network +# Default-Start: 3 5 +# Default-Stop: 0 1 2 6 +# Short-Description: Shibboleth 3 Service Provider Daemon +# Description: Starts the separate daemon used by the Shibboleth Apache module to manage state and SAML interactions. +### END INIT INFO +# + +DESC="Shibboleth 3 daemon" +NAME=shibd +SHIB_CONFIG=/etc/shibboleth/shibboleth2.xml +DAEMON=/usr/sbin/$NAME +SCRIPTNAME=/etc/init.d/$NAME +PID_FILE=/var/run/shibboleth/shibd.pid +SHIBD_USER=shibd +SHIBD_UMASK=022 +SHIBD_WAIT=30 +DAEMON_OPTS="" + +[ -e /etc/sysconfig/$NAME ] && . /etc/sysconfig/$NAME + +# Force removal of socket +DAEMON_OPTS="$DAEMON_OPTS -f" + +# Use defined configuration file +DAEMON_OPTS="$DAEMON_OPTS -c $SHIB_CONFIG" + +# Specify pid file to use +DAEMON_OPTS="$DAEMON_OPTS -p $PID_FILE" + +# Specify wait time to use +DAEMON_OPTS="$DAEMON_OPTS -w $SHIBD_WAIT" + +umask $SHIBD_UMASK + +# Exit if the package is not installed. +test -x "$DAEMON" || exit 5 + +. /etc/rc.status + +# First reset status of this service +rc_reset + +case "$1" in + start) + # Make sure package run directory exists. + [ -d /var/run/shibboleth ] || mkdir /var/run/shibboleth + + # Handle transition from root to non-root packages. + chown -R $SHIBD_USER:$SHIBD_USER /var/run/shibboleth /var/cache/shibboleth 2>/dev/null || : + + echo -n "Starting $DESC ($NAME)" + ## Start daemon with startproc(8). If this fails + ## the echo return value is set appropriate. + + # NOTE: startproc return 0, even if service is + # already running to match LSB spec. + /sbin/startproc -u $SHIBD_USER -p $PID_FILE $DAEMON $DAEMON_OPTS > /dev/null 2>&1 + + # Remember status and be verbose + rc_status -v + ;; + stop) + echo -n "Shutting down $DESC ($NAME)" + ## Stop daemon with killproc(8) and if this fails + ## set echo the echo return value. 
+ + /sbin/killproc -p $PID_FILE -TERM $DAEMON > /dev/null 2>&1 + + # Remember status and be verbose + rc_status -v + ;; + try-restart) + ## Stop the service and if this succeeds (i.e. the + ## service was running before), start it again. + ## Note: try-restart is not (yet) part of LSB (as of 0.7.5) + $0 status >/dev/null && $0 restart + + # Remember status and be quiet + rc_status + ;; + restart) + ## Stop the service and regardless of whether it was + ## running or not, start it again. + $0 stop + $0 start + + # Remember status and be quiet + rc_status + ;; + configtest) + ## Check config files + + echo -n "Checking config for $DESC ($NAME): " + $DAEMON $DAEMON_OPTS -t + rc_status -v + ;; + status) + echo -n "Checking for service $DESC ($NAME)" + ## Check status with checkproc(8), if process is running + ## checkproc will return with exit status 0. + + # Status has a slightly different for the status command: + # 0 - service running + # 1 - service dead, but /var/run/ pid file exists + # 2 - service dead, but /var/lock/ lock file exists + # 3 - service not running + + # NOTE: checkproc returns LSB compliant status values. + /sbin/checkproc -p $PID_FILE $DAEMON + rc_status -v + ;; + *) + echo "Usage: $0 {start|stop|status|configtest|try-restart|restart}" + exit 1 + ;; +esac +rc_exit diff --git a/shibboleth/shibd-systemd b/shibboleth/shibd-systemd new file mode 100644 index 0000000..c02f0d6 --- /dev/null +++ b/shibboleth/shibd-systemd @@ -0,0 +1,23 @@ +[Unit] +Description=Shibboleth Service Provider Daemon +Documentation=man:shibd(8) +Documentation=https://wiki.shibboleth.net/confluence/display/SP3/Home +After=network.target +Before=httpd.service + +[Service] +Type=notify +NotifyAccess=main +User=shibd +#Environment=LD_LIBRARY_PATH=/opt/shibboleth/lib +ExecStart=/usr/sbin/shibd -f -F +StandardInput=null +StandardOutput=null +StandardError=journal +TimeoutStopSec=1min +TimeoutStartSec=5min +Restart=on-failure +RestartSec=30s + +[Install] +WantedBy=multi-user.target diff --git a/shibboleth/shibd.logger b/shibboleth/shibd.logger new file mode 100644 index 0000000..39950c5 --- /dev/null +++ b/shibboleth/shibd.logger @@ -0,0 +1,73 @@ +# set overall behavior +log4j.rootCategory=INFO, shibd_log, warn_log + +# fairly verbose for DEBUG, so generally leave at INFO +log4j.category.XMLTooling.XMLObject=INFO +log4j.category.XMLTooling.XMLObjectBuilder=INFO +log4j.category.XMLTooling.KeyInfoResolver=INFO +log4j.category.Shibboleth.IPRange=INFO +log4j.category.Shibboleth.PropertySet=INFO + +# raise for low-level tracing of SOAP client HTTP/SSL behavior +log4j.category.XMLTooling.libcurl=INFO + +# useful categories to tune independently: +# +# tracing of SAML messages and security policies +#log4j.category.OpenSAML.MessageDecoder=DEBUG +#log4j.category.OpenSAML.MessageEncoder=DEBUG +#log4j.category.OpenSAML.SecurityPolicyRule=DEBUG +#log4j.category.XMLTooling.SOAPClient=DEBUG +# interprocess message remoting +#log4j.category.Shibboleth.Listener=DEBUG +# mapping of requests to applicationId +#log4j.category.Shibboleth.RequestMapper=DEBUG +# high level session cache operations +#log4j.category.Shibboleth.SessionCache=DEBUG +# persistent storage and caching +#log4j.category.XMLTooling.StorageService=DEBUG + +# logs XML being signed or verified if set to DEBUG +log4j.category.XMLTooling.Signature.Debugger=INFO, sig_log +log4j.additivity.XMLTooling.Signature.Debugger=false +log4j.ownAppenders.XMLTooling.Signature.Debugger=true + +# the tran log blocks the "default" appender(s) at runtime +# Level should be left 
at INFO for this category +log4j.category.Shibboleth-TRANSACTION=INFO, tran_log +log4j.additivity.Shibboleth-TRANSACTION=false +log4j.ownAppenders.Shibboleth-TRANSACTION=true + +# uncomment to suppress particular event types +#log4j.category.Shibboleth-TRANSACTION.AuthnRequest=WARN +#log4j.category.Shibboleth-TRANSACTION.Login=WARN +#log4j.category.Shibboleth-TRANSACTION.Logout=WARN + +# define the appenders + +log4j.appender.shibd_log=org.apache.log4j.RollingFileAppender +log4j.appender.shibd_log.fileName=/var/log/shibboleth/shibd.log +log4j.appender.shibd_log.maxFileSize=1000000 +log4j.appender.shibd_log.maxBackupIndex=10 +log4j.appender.shibd_log.layout=org.apache.log4j.PatternLayout +log4j.appender.shibd_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S} %p %c %x: %m%n + +log4j.appender.warn_log=org.apache.log4j.RollingFileAppender +log4j.appender.warn_log.fileName=/var/log/shibboleth/shibd_warn.log +log4j.appender.warn_log.maxFileSize=1000000 +log4j.appender.warn_log.maxBackupIndex=10 +log4j.appender.warn_log.layout=org.apache.log4j.PatternLayout +log4j.appender.warn_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S} %p %c %x: %m%n +log4j.appender.warn_log.threshold=WARN + +log4j.appender.tran_log=org.apache.log4j.RollingFileAppender +log4j.appender.tran_log.fileName=/var/log/shibboleth/transaction.log +log4j.appender.tran_log.maxFileSize=1000000 +log4j.appender.tran_log.maxBackupIndex=20 +log4j.appender.tran_log.layout=org.apache.log4j.PatternLayout +log4j.appender.tran_log.layout.ConversionPattern=%d{%Y-%m-%d %H:%M:%S}|%c|%m%n + +log4j.appender.sig_log=org.apache.log4j.FileAppender +log4j.appender.sig_log.fileName=/var/log/shibboleth/signature.log +log4j.appender.sig_log.layout=org.apache.log4j.PatternLayout +log4j.appender.sig_log.layout.ConversionPattern=%m diff --git a/shibboleth/sslError.html b/shibboleth/sslError.html new file mode 100644 index 0000000..367366a --- /dev/null +++ b/shibboleth/sslError.html @@ -0,0 +1,33 @@ + + + + + + + + POST Failed + + + + + +Logo + +

POST Failed

+ +

+You have attempted to submit information without the protection +of TLS to this site.
+

+ +

+For the protection of your submission and the integrity of the site, +this is not permitted. Please try accessing the server with a +URL starting with https:// and report this problem +to +

+ + + diff --git a/start.sh b/start.sh new file mode 100755 index 0000000..5de0b4d --- /dev/null +++ b/start.sh @@ -0,0 +1,5 @@ +#!/bin/bash +docker-compose up -d +sleep 60 +docker exec dataverse asadmin --user=admin --passwordfile=/opt/payara/passwordFile deploy /opt/payara/deployments/dataverse.war + diff --git a/startup.sh b/startup.sh new file mode 100755 index 0000000..5c8802e --- /dev/null +++ b/startup.sh @@ -0,0 +1,5 @@ +#!/bin/bash +docker-compose up -d +sleep 10 +docker exec dataverse /etc/init.d/cron restart + diff --git a/update.sh b/update.sh new file mode 100755 index 0000000..03aebfd --- /dev/null +++ b/update.sh @@ -0,0 +1,5 @@ +#! /bin/bash +apt-get update -q && apt-get dist-upgrade -qqy --no-install-recommends && apt-get autoremove -qy && apt-get clean -qy && apt-get autoclean -qy +[ -e /var/run/reboot-required ] && reboot + +# 0 04 * * 4 /distrib/dataverse-docker/update.sh
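
Note on start.sh: the script waits a fixed 60 seconds after docker-compose up before deploying the WAR, which can be too short on a slow host and needlessly long on a fast one. Below is a minimal sketch of a readiness poll that could replace the fixed sleep; it reuses the container name (dataverse), admin user, and password file already present in start.sh, but the polling loop itself is only an illustration under those assumptions, not part of this repository.

#!/bin/bash
# Sketch: instead of a fixed sleep, wait until the Payara admin server inside
# the "dataverse" container answers asadmin, then deploy the WAR. Container
# name, admin user, and password file are taken from start.sh above; the
# 30 x 10 s retry budget is an arbitrary assumption.
docker-compose up -d
for i in $(seq 1 30); do
  if docker exec dataverse asadmin --user=admin --passwordfile=/opt/payara/passwordFile \
       list-applications >/dev/null 2>&1; then
    break
  fi
  sleep 10
done
docker exec dataverse asadmin --user=admin --passwordfile=/opt/payara/passwordFile \
  deploy /opt/payara/deployments/dataverse.war

The cron line commented at the end of update.sh (0 04 * * 4, i.e. 04:00 every Thursday) schedules the weekly OS update; it is not installed by the script itself and has to be added to root's crontab on the host manually, for example with crontab -e.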