streampipes-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rie...@apache.org
Subject [incubator-streampipes-website] 02/07: Add initial version of new integrated Apache StreamPipes website+docs
Date Sun, 24 Nov 2019 20:04:56 GMT
This is an automated email from the ASF dual-hosted git repository.

riemer pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-streampipes-website.git

commit 7456edaf3d1114fb37614e6fb2f2d8b5e1622a7c
Author: Dominik Riemer <riemer@fzi.de>
AuthorDate: Mon Nov 18 23:12:43 2019 +0100

    Add initial version of new integrated Apache StreamPipes website+docs
---
 .gitignore                                         |   18 +
 Dockerfile                                         |    9 +
 documentation/docs/.Rhistory                       |    0
 documentation/docs/dev-guide-archetype.md          |  142 +
 documentation/docs/dev-guide-architecture.md       |   61 +
 documentation/docs/dev-guide-configuration.md      |   58 +
 documentation/docs/dev-guide-environment.md        |  112 +
 documentation/docs/dev-guide-event-model.md        |  141 +
 documentation/docs/dev-guide-introduction.md       |   52 +
 documentation/docs/dev-guide-migration.md          |  192 +
 documentation/docs/dev-guide-output-strategies.md  |  346 +
 documentation/docs/dev-guide-processor-sdk.md      |   11 +
 documentation/docs/dev-guide-sink-sdk.md           |   11 +
 documentation/docs/dev-guide-source-sdk.md         |   75 +
 documentation/docs/dev-guide-ssl.md                |   35 +
 documentation/docs/dev-guide-static-properties.md  |  264 +
 .../docs/dev-guide-stream-requirements.md          |  178 +
 .../docs/dev-guide-tutorial-processors.md          |  499 +
 documentation/docs/dev-guide-tutorial-sinks.md     |  246 +
 documentation/docs/dev-guide-tutorial-sources.md   |  282 +
 documentation/docs/faq-common-problems.md          |   73 +
 documentation/docs/license.md                      |    0
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   43 +
 .../documentation.md                               |   32 +
 .../documentation.md                               |   38 +
 .../documentation.md                               |   38 +
 .../documentation.md                               |   38 +
 .../documentation.md                               |   33 +
 .../documentation.md                               |   30 +
 .../documentation.md                               |   32 +
 .../documentation.md                               |   38 +
 .../documentation.md                               |   30 +
 .../documentation.md                               |   35 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   33 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   35 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   40 +
 .../documentation.md                               |   42 +
 .../documentation.md                               |   42 +
 .../documentation.md                               |   43 +
 .../documentation.md                               |   56 +
 .../documentation.md                               |   35 +
 .../documentation.md                               |   42 +
 .../documentation.md                               |   45 +
 .../documentation.md                               |   67 +
 .../documentation.md                               |   55 +
 .../documentation.md                               |   34 +
 .../documentation.md                               |   40 +
 .../documentation.md                               |   49 +
 .../documentation.md                               |   45 +
 .../documentation.md                               |   50 +
 documentation/docs/pipeline-elements.md            |    7 +
 documentation/docs/privacy.md                      |   36 +
 documentation/docs/user-guide-first-steps.md       |  214 +
 documentation/docs/user-guide-installation.md      |  140 +
 documentation/docs/user-guide-introduction.md      |   61 +
 .../docs/user-guide-software-components.md         |  334 +
 documentation/docs/user-guide-tour.md              |  279 +
 documentation/website/blog/2018-06-14-welcome.md   |   11 +
 ...18-06-18-how-to-understand-your-machine-data.md |  105 +
 .../website/blog/2018-09-17-preview-060.md         |   67 +
 .../website/blog/2018-11-24-container-cli.md       |   70 +
 .../website/blog/2019-04-04-release-0610.md        |  106 +
 .../website/blog/2019-05-23-release-0620.md        |   67 +
 .../website/blog/2019-09-05-release-0630.md        |   81 +
 .../website/blog/2019-09-19-release-0640.md        |   51 +
 .../blog/assets/2018-06-18/01_motivation.png       |  Bin 0 -> 75008 bytes
 .../website/blog/assets/2018-06-18/02_anlage.png   |  Bin 0 -> 592089 bytes
 .../blog/assets/2018-06-18/03_architecture.png     |  Bin 0 -> 103391 bytes
 .../website/blog/assets/2018-06-18/04_pipeline.png |  Bin 0 -> 91407 bytes
 .../2018-09-17/streampipes-060-product-tour.png    |  Bin 0 -> 151357 bytes
 .../blog/assets/2018-09-17/streampipes-060.png     |  Bin 0 -> 157867 bytes
 .../website/blog/assets/2018-11-24/files.png       |  Bin 0 -> 42530 bytes
 .../website/blog/assets/2018-11-24/roles.png       |  Bin 0 -> 166171 bytes
 .../website/blog/assets/2018-11-24/variables.png   |  Bin 0 -> 70428 bytes
 .../website/blog/assets/2019-04-04/connect.png     |  Bin 0 -> 122780 bytes
 .../website/blog/assets/2019-04-04/validation.png  |  Bin 0 -> 136644 bytes
 .../website/blog/assets/2019-05-23/connect.png     |  Bin 0 -> 169555 bytes
 .../blog/assets/2019-05-23/pipeline-editor.png     |  Bin 0 -> 151809 bytes
 .../2019-05-23/pipeline-element-documentation.png  |  Bin 0 -> 145710 bytes
 .../website/blog/assets/2019-09-19/memory.png      |  Bin 0 -> 145509 bytes
 .../website/blog/assets/2019-09-19/spconnect.png   |  Bin 0 -> 132647 bytes
 documentation/website/core/Footer.js               |  107 +
 documentation/website/i18n/en.json                 |  724 ++
 documentation/website/package-lock.json            | 9855 ++++++++++++++++++++
 documentation/website/package.json                 |   14 +
 documentation/website/pages/en/community.js        |   67 +
 documentation/website/pages/en/help.js             |   57 +
 documentation/website/pages/en/index.js            |  266 +
 documentation/website/pages/en/users.js            |   49 +
 documentation/website/pages/en/versions.js         |   92 +
 documentation/website/sidebars.json                |   48 +
 documentation/website/siteConfig.js                |  128 +
 documentation/website/static/css/admonition.css    |  177 +
 .../website/static/css/code-block-buttons.css      |   39 +
 documentation/website/static/css/custom.css        |  306 +
 .../website/static/img/archetype/endpoint.png      |  Bin 0 -> 74597 bytes
 .../static/img/archetype/example_pipeline.png      |  Bin 0 -> 189625 bytes
 .../static/img/archetype/project_structure.png     |  Bin 0 -> 80662 bytes
 .../static/img/archetype/run_configuration.png     |  Bin 0 -> 210324 bytes
 .../static/img/archetype/run_env_configuration.png |  Bin 0 -> 133774 bytes
 .../img/architecture/high-level-architecture.png   |  Bin 0 -> 203877 bytes
 .../semantic-description-processor.png             |  Bin 0 -> 98933 bytes
 .../architecture/semantic-description-stream.png   |  Bin 0 -> 96240 bytes
 .../static/img/configuration/config_key.png        |  Bin 0 -> 54925 bytes
 .../static/img/configuration/configuration_1.png   |  Bin 0 -> 114961 bytes
 .../website/static/img/configuration/consul.png    |  Bin 0 -> 123919 bytes
 .../img/dev-guide-output-strategies/os-custom.png  |  Bin 0 -> 73469 bytes
 .../dev-guide-static-properties/sp-collection.png  |  Bin 0 -> 63349 bytes
 .../sp-mapping-nary.png                            |  Bin 0 -> 69711 bytes
 .../sp-mapping-unary.png                           |  Bin 0 -> 60831 bytes
 .../sp-multi-selection.png                         |  Bin 0 -> 65431 bytes
 .../sp-number-parameter-with-range.png             |  Bin 0 -> 62435 bytes
 .../sp-number-parameter.png                        |  Bin 0 -> 58347 bytes
 .../sp-single-selection-remote.png                 |  Bin 0 -> 58852 bytes
 .../sp-single-selection.png                        |  Bin 0 -> 71994 bytes
 .../sp-text-parameter.png                          |  Bin 0 -> 51828 bytes
 documentation/website/static/img/docusaurus.svg    |    1 +
 documentation/website/static/img/favicon.png       |  Bin 0 -> 34049 bytes
 .../website/static/img/favicon/favicon.ico         |  Bin 0 -> 9662 bytes
 .../configuration/01_configuration_overview.png    |  Bin 0 -> 144077 bytes
 .../img/features/configuration/02_status.png       |  Bin 0 -> 166320 bytes
 .../features/configuration/03_change_config.png    |  Bin 0 -> 169040 bytes
 .../static/img/features/dashboard/01_dashboard.png |  Bin 0 -> 1178882 bytes
 .../features/dashboard/02_new_visualisation.png    |  Bin 0 -> 619256 bytes
 .../img/features/dashboard/03_select_gauge.png     |  Bin 0 -> 587208 bytes
 .../img/features/dashboard/04_configure_gauge.png  |  Bin 0 -> 601541 bytes
 .../img/features/dashboard/05_new_gauge_done.png   |  Bin 0 -> 757239 bytes
 .../static/img/features/dashboard/06_add_new.png   |  Bin 0 -> 1144868 bytes
 .../img/features/editor/10_shortcut_buttons.png    |  Bin 0 -> 341129 bytes
 .../img/features/editor/11_compatible_elements.png |  Bin 0 -> 459988 bytes
 .../img/features/editor/12_recommend_elements.png  |  Bin 0 -> 339972 bytes
 .../static/img/features/editor/13_save_adjust.png  |  Bin 0 -> 534505 bytes
 .../img/features/editor/14_save_dialogue.png       |  Bin 0 -> 424745 bytes
 .../editor/15_pipeline_sucessfully_started.png     |  Bin 0 -> 302914 bytes
 .../static/img/features/editor/1_data_streams.png  |  Bin 0 -> 308396 bytes
 .../img/features/editor/2_processing_elements.png  |  Bin 0 -> 287149 bytes
 .../static/img/features/editor/3_data_sinks.png    |  Bin 0 -> 251836 bytes
 .../static/img/features/editor/4_text_filter.png   |  Bin 0 -> 231865 bytes
 .../img/features/editor/5_category_drop_down.png   |  Bin 0 -> 329123 bytes
 .../img/features/editor/6_category_filter.png      |  Bin 0 -> 243478 bytes
 .../img/features/editor/7_connect_elements.png     |  Bin 0 -> 466042 bytes
 .../img/features/editor/8_configure_element.png    |  Bin 0 -> 318625 bytes
 .../img/features/editor/9_connection_error.png     |  Bin 0 -> 362247 bytes
 .../features/file_download/01_file_download.png    |  Bin 0 -> 174370 bytes
 .../features/file_download/02_file_download.png    |  Bin 0 -> 217949 bytes
 .../features/file_download/03_file_downloaded.png  |  Bin 0 -> 203346 bytes
 .../website/static/img/features/home/home.png      |  Bin 0 -> 481717 bytes
 .../website/static/img/features/home/logout.png    |  Bin 0 -> 525699 bytes
 .../website/static/img/features/home/menu.png      |  Bin 0 -> 502059 bytes
 .../website/static/img/features/home/open_menu.png |  Bin 0 -> 369375 bytes
 .../install_elements/01_install_elements.png       |  Bin 0 -> 298475 bytes
 .../install_elements/02_install_absence.png        |  Bin 0 -> 307351 bytes
 .../install_elements/03_installed_absence.png      |  Bin 0 -> 301235 bytes
 .../install_elements/04_abcense_in_editor.png      |  Bin 0 -> 291436 bytes
 .../website/static/img/features/login.png          |  Bin 0 -> 3807049 bytes
 .../manage_pipelines/1_pipeline_overview.png       |  Bin 0 -> 167479 bytes
 .../manage_pipelines/2_start_stop_pipeline.png     |  Bin 0 -> 178123 bytes
 .../manage_pipelines/3_delete_pipeline.png         |  Bin 0 -> 200857 bytes
 .../manage_pipelines/4_pipeline_deleted.png        |  Bin 0 -> 174762 bytes
 .../manage_pipelines/5_pipeline_category.png       |  Bin 0 -> 198884 bytes
 .../features/manage_pipelines/6_add_category.png   |  Bin 0 -> 218655 bytes
 .../features/manage_pipelines/7_new_category.png   |  Bin 0 -> 233820 bytes
 .../manage_pipelines/8_show_new_category.png       |  Bin 0 -> 202205 bytes
 .../img/features/my_elements/01_my_elements.png    |  Bin 0 -> 359345 bytes
 .../features/my_elements/02_my_elements_jsonls.png |  Bin 0 -> 384742 bytes
 .../static/img/features/my_elements/03_jsonld.png  |  Bin 0 -> 960834 bytes
 .../notification/01_select_notification_sink.png   |  Bin 0 -> 321877 bytes
 .../notification/02_configure_notification.png     |  Bin 0 -> 338399 bytes
 .../notification/03_use_event_properties.png       |  Bin 0 -> 348738 bytes
 .../features/notification/04_new_notification.png  |  Bin 0 -> 164679 bytes
 .../img/features/notification/05_mark_read.png     |  Bin 0 -> 169831 bytes
 .../features/notification/06_marked_as_read.png    |  Bin 0 -> 145979 bytes
 .../static/img/features/pipeline_editor.png        |  Bin 0 -> 508371 bytes
 .../configuration/01_configuration_overview.png    |  Bin 0 -> 102276 bytes
 .../configuration/02_change_config.png             |  Bin 0 -> 131473 bytes
 .../img/features_0_62_0/dashboard/01_dashboard.png |  Bin 0 -> 121346 bytes
 .../dashboard/02_new_visualisation.png             |  Bin 0 -> 117696 bytes
 .../features_0_62_0/dashboard/03_select_gauge.png  |  Bin 0 -> 124451 bytes
 .../dashboard/04_configure_gauge.png               |  Bin 0 -> 115686 bytes
 .../dashboard/05_new_gauge_done.png                |  Bin 0 -> 141348 bytes
 .../features_0_62_0/editor/10_connection_error.png |  Bin 0 -> 156593 bytes
 .../features_0_62_0/editor/11_shortcut_buttons.png |  Bin 0 -> 179341 bytes
 .../editor/12_compatible_elements.png              |  Bin 0 -> 194148 bytes
 .../editor/13_recommend_elements.png               |  Bin 0 -> 192328 bytes
 .../img/features_0_62_0/editor/14_save_adjust.png  |  Bin 0 -> 175440 bytes
 .../features_0_62_0/editor/15_save_dialogue.png    |  Bin 0 -> 163445 bytes
 .../editor/16_pipeline_sucessfully_started.png     |  Bin 0 -> 113043 bytes
 .../editor/17_pipeline_information.png             |  Bin 0 -> 132882 bytes
 .../editor/1_PipelineEditor_DataSets.png           |  Bin 0 -> 95707 bytes
 .../editor/2_PipelineEditor_DataStreams.png        |  Bin 0 -> 146667 bytes
 .../editor/3_PipelineEditor_DataProcessors.png     |  Bin 0 -> 121321 bytes
 .../editor/4_PipelineEditor_DataSinks.png          |  Bin 0 -> 108886 bytes
 .../img/features_0_62_0/editor/5_text_filter.png   |  Bin 0 -> 54909 bytes
 .../editor/6_category_drop_down.png                |  Bin 0 -> 141162 bytes
 .../features_0_62_0/editor/7_category_filter.png   |  Bin 0 -> 117204 bytes
 .../features_0_62_0/editor/8_connect_elements.png  |  Bin 0 -> 150320 bytes
 .../features_0_62_0/editor/9_configure_element.png |  Bin 0 -> 142934 bytes
 .../file_download/01_file_download.png             |  Bin 0 -> 80878 bytes
 .../file_download/02_file_download.png             |  Bin 0 -> 99493 bytes
 .../file_download/03_file_download.png             |  Bin 0 -> 88640 bytes
 .../static/img/features_0_62_0/home/home.png       |  Bin 0 -> 342723 bytes
 .../static/img/features_0_62_0/home/open_menu.png  |  Bin 0 -> 277217 bytes
 .../install_elements/01_install_elements.png       |  Bin 0 -> 145499 bytes
 .../02_install_countAggregation.png                |  Bin 0 -> 153560 bytes
 .../03_installed_countAggregation.png              |  Bin 0 -> 150344 bytes
 .../04_countAggregation_in_editor.png              |  Bin 0 -> 125934 bytes
 .../website/static/img/features_0_62_0/login.png   |  Bin 0 -> 501499 bytes
 .../manage_pipelines/1_pipeline_overview.png       |  Bin 0 -> 112379 bytes
 .../manage_pipelines/2_pipeline_information.png    |  Bin 0 -> 132882 bytes
 .../manage_pipelines/3_delete_pipeline.png         |  Bin 0 -> 114315 bytes
 .../manage_pipelines/4_pipeline_deleted.png        |  Bin 0 -> 105179 bytes
 .../manage_pipelines/5_pipeline_category.png       |  Bin 0 -> 111154 bytes
 .../manage_pipelines/6_add_category.png            |  Bin 0 -> 108536 bytes
 .../manage_pipelines/7_new_category.png            |  Bin 0 -> 114286 bytes
 .../manage_pipelines/8_show_new_category.png       |  Bin 0 -> 98449 bytes
 .../features_0_62_0/my_elements/01_my_elements.png |  Bin 0 -> 190389 bytes
 .../my_elements/02_my_elements_jsonid.png          |  Bin 0 -> 193762 bytes
 .../notification/01_select_notification_sink.png   |  Bin 0 -> 140835 bytes
 .../notification/02_configure_notification.png     |  Bin 0 -> 159162 bytes
 .../notification/03_new_notification.png           |  Bin 0 -> 85457 bytes
 .../static/img/features_0_62_0/pipeline_editor.png |  Bin 0 -> 508371 bytes
 documentation/website/static/img/logo.png          |  Bin 0 -> 14358 bytes
 documentation/website/static/img/one.png           |  Bin 0 -> 8855 bytes
 documentation/website/static/img/oss_logo.png      |  Bin 0 -> 4370 bytes
 .../org.streampipes.processor.geo.flink/icon.png   |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 4515 bytes
 .../icon.png                                       |  Bin 0 -> 4515 bytes
 .../icon.png                                       |  Bin 0 -> 4372 bytes
 .../icon.png                                       |  Bin 0 -> 17559 bytes
 .../icon.png                                       |  Bin 0 -> 511 bytes
 .../icon.png                                       |  Bin 0 -> 1854 bytes
 .../icon.png                                       |  Bin 0 -> 4738 bytes
 .../icon.png                                       |  Bin 0 -> 6197 bytes
 .../icon.png                                       |  Bin 0 -> 22150 bytes
 .../icon.png                                       |  Bin 0 -> 10608 bytes
 .../icon.png                                       |  Bin 0 -> 36697 bytes
 .../icon.png                                       |  Bin 0 -> 7429 bytes
 .../icon.png                                       |  Bin 0 -> 19386 bytes
 .../icon.png                                       |  Bin 0 -> 1854 bytes
 .../icon.png                                       |  Bin 0 -> 3172 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 5913 bytes
 .../icon.png                                       |  Bin 0 -> 3659 bytes
 .../icon.png                                       |  Bin 0 -> 2459 bytes
 .../icon.png                                       |  Bin 0 -> 6063 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 2516 bytes
 .../icon.png                                       |  Bin 0 -> 18764 bytes
 .../icon.png                                       |  Bin 0 -> 20685 bytes
 .../icon.png                                       |  Bin 0 -> 710 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 2830 bytes
 .../icon.png                                       |  Bin 0 -> 3172 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 4378 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 11482 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 14453 bytes
 .../icon.png                                       |  Bin 0 -> 19799 bytes
 .../icon.png                                       |  Bin 0 -> 10904 bytes
 .../icon.png                                       |  Bin 0 -> 12949 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3186 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 3947 bytes
 .../org.streampipes.sinks.brokers.jvm.jms/icon.png |  Bin 0 -> 25474 bytes
 .../icon.png                                       |  Bin 0 -> 18630 bytes
 .../icon.png                                       |  Bin 0 -> 21556 bytes
 .../icon.png                                       |  Bin 0 -> 3475 bytes
 .../icon.png                                       |  Bin 0 -> 20411 bytes
 .../icon.png                                       |  Bin 0 -> 48173 bytes
 .../icon.png                                       |  Bin 0 -> 9982 bytes
 .../icon.png                                       |  Bin 0 -> 8890 bytes
 .../icon.png                                       |  Bin 0 -> 25215 bytes
 .../icon.png                                       |  Bin 0 -> 11738 bytes
 .../icon.png                                       |  Bin 0 -> 8525 bytes
 .../icon.png                                       |  Bin 0 -> 14531 bytes
 .../icon.png                                       |  Bin 0 -> 9767 bytes
 .../website/static/img/production-line-gray.png    |  Bin 0 -> 995787 bytes
 .../img/quickstart/elements/01_goto_elements.png   |  Bin 0 -> 305066 bytes
 .../img/quickstart/elements/02_install_sources.png |  Bin 0 -> 332542 bytes
 .../quickstart/elements/03_installed_sources.png   |  Bin 0 -> 310585 bytes
 .../quickstart/elements/04_install_elements.png    |  Bin 0 -> 287478 bytes
 .../quickstart/elements/05_installed_elements.png  |  Bin 0 -> 311860 bytes
 .../img/quickstart/elements/06_install_sinks.png   |  Bin 0 -> 261602 bytes
 .../img/quickstart/elements/07_installed_sinks.png |  Bin 0 -> 282340 bytes
 .../elements/08_pipeline_editor_sources.png        |  Bin 0 -> 230704 bytes
 .../elements/09_pipeline_editor_elements.png       |  Bin 0 -> 193061 bytes
 .../elements/10_pipeline_editor_sinks.png          |  Bin 0 -> 193194 bytes
 .../examples/01_PipelineEditor_DataStreams.png     |  Bin 0 -> 146667 bytes
 .../img/quickstart/examples/01_example1_empty.png  |  Bin 0 -> 209790 bytes
 .../img/quickstart/examples/02_example1_source.png |  Bin 0 -> 243302 bytes
 .../examples/03_example1_pipeline_finished.png     |  Bin 0 -> 246297 bytes
 .../img/quickstart/examples/04_example1_save.png   |  Bin 0 -> 280368 bytes
 .../examples/05_example1_pipeline_started.png      |  Bin 0 -> 213901 bytes
 .../examples/06_example01_live_visualisation.png   |  Bin 0 -> 174816 bytes
 .../examples/07_example01_first_step.png           |  Bin 0 -> 199007 bytes
 .../examples/08_example01_second_step.png          |  Bin 0 -> 202380 bytes
 .../examples/09_example01_third_step.png           |  Bin 0 -> 216688 bytes
 .../quickstart/examples/10_example1_finished.png   |  Bin 0 -> 217952 bytes
 .../img/quickstart/examples/11_example2_start.png  |  Bin 0 -> 219702 bytes
 .../quickstart/examples/12_example2_numerical.png  |  Bin 0 -> 253107 bytes
 .../examples/13_example2_configure_numerical.png   |  Bin 0 -> 285352 bytes
 .../quickstart/examples/14_example2_aggregate.png  |  Bin 0 -> 275546 bytes
 .../examples/15_example2_configure_aggregate.png   |  Bin 0 -> 320690 bytes
 .../quickstart/examples/16_example2_increase.png   |  Bin 0 -> 261436 bytes
 .../examples/17_example2_configure1_increase.png   |  Bin 0 -> 298918 bytes
 .../examples/18_example2_configure2_increase.png   |  Bin 0 -> 302194 bytes
 .../quickstart/examples/19_example2_sequence.png   |  Bin 0 -> 276533 bytes
 .../examples/20_example2_configure_sequence.png    |  Bin 0 -> 285365 bytes
 .../examples/21_example2_notification.png          |  Bin 0 -> 308424 bytes
 .../22_example2_configure_notification.png         |  Bin 0 -> 329655 bytes
 .../examples/23_example2_dashboard_sink.png        |  Bin 0 -> 338525 bytes
 .../img/quickstart/examples/24_example2_save.png   |  Bin 0 -> 297255 bytes
 .../quickstart/examples/25_example2_started.png    |  Bin 0 -> 296071 bytes
 .../examples/26_example2_visualisation.png         |  Bin 0 -> 340611 bytes
 .../examples/27_example2_notification.png          |  Bin 0 -> 156323 bytes
 .../01_PipelineEditor_DataStreams.png              |  Bin 0 -> 167152 bytes
 .../examples_master/02_example1_source.png         |  Bin 0 -> 179021 bytes
 .../03_example1_pipeline_finished.png              |  Bin 0 -> 178791 bytes
 .../examples_master/04_example1_save.png           |  Bin 0 -> 164353 bytes
 .../05_example1_pipeline_started.png               |  Bin 0 -> 97573 bytes
 .../06_example01_live_visualisation.png            |  Bin 0 -> 63277 bytes
 .../examples_master/07_example01_first_step.png    |  Bin 0 -> 92502 bytes
 .../examples_master/08_example01_second_step.png   |  Bin 0 -> 102239 bytes
 .../examples_master/09_example01_third_step.png    |  Bin 0 -> 97636 bytes
 .../examples_master/10_example1_finished.png       |  Bin 0 -> 63922 bytes
 .../examples_master/11_example2_start.png          |  Bin 0 -> 177520 bytes
 .../examples_master/12_example2_numerical.png      |  Bin 0 -> 173569 bytes
 .../13_example2_configure_numerical.png            |  Bin 0 -> 147410 bytes
 .../examples_master/14_example2_aggregate.png      |  Bin 0 -> 180150 bytes
 .../15_example2_configure_aggregate.png            |  Bin 0 -> 171549 bytes
 .../15_example2_configure_aggregate_2.png          |  Bin 0 -> 154612 bytes
 .../examples_master/16_example2_increase.png       |  Bin 0 -> 178321 bytes
 .../17_example2_configure1_increase.png            |  Bin 0 -> 179200 bytes
 .../examples_master/19_example2_sequence.png       |  Bin 0 -> 196734 bytes
 .../20_example2_configure_sequence.png             |  Bin 0 -> 194842 bytes
 .../examples_master/21_example2_notification.png   |  Bin 0 -> 202137 bytes
 .../22_example2_configure_notification.png         |  Bin 0 -> 162731 bytes
 .../examples_master/23_example2_dashboard_sink.png |  Bin 0 -> 204462 bytes
 .../examples_master/25_example2_started.png        |  Bin 0 -> 196661 bytes
 .../static/img/quickstart/interactive-tutorial.png |  Bin 0 -> 216613 bytes
 .../img/quickstart/setup/01_register_user.png      |  Bin 0 -> 794371 bytes
 .../static/img/quickstart/setup/02_user_set_up.png |  Bin 0 -> 813084 bytes
 .../static/img/quickstart/setup/03_login.png       |  Bin 0 -> 1170831 bytes
 .../static/img/quickstart/setup/04_home.png        |  Bin 0 -> 383364 bytes
 .../website/static/img/quickstart/setup/login.png  |  Bin 0 -> 3807049 bytes
 documentation/website/static/img/riemer.png        |  Bin 0 -> 428115 bytes
 .../static/img/slideshow/slideshow-config.png      |  Bin 0 -> 160919 bytes
 .../static/img/slideshow/slideshow-connect.png     |  Bin 0 -> 198388 bytes
 .../static/img/slideshow/slideshow-dashboard.png   |  Bin 0 -> 179762 bytes
 .../static/img/slideshow/slideshow-home.png        |  Bin 0 -> 217760 bytes
 .../img/slideshow/slideshow-installation.png       |  Bin 0 -> 131776 bytes
 .../img/slideshow/slideshow-pipeline-view.png      |  Bin 0 -> 102351 bytes
 .../static/img/slideshow/slideshow-pipeline.png    |  Bin 0 -> 211773 bytes
 .../img/slideshow/slideshow-recommendation.png     |  Bin 0 -> 199717 bytes
 .../website/static/img/sp-logo-right-white.png     |  Bin 0 -> 21453 bytes
 .../website/static/img/streampipes-screenshot.png  |  Bin 0 -> 359634 bytes
 documentation/website/static/img/three.png         |  Bin 0 -> 17876 bytes
 .../img/tutorial-processors/pe-overview-flink.PNG  |  Bin 0 -> 30113 bytes
 .../img/tutorial-processors/pe-rdf-geofencing.PNG  |  Bin 0 -> 70129 bytes
 .../project-structure-processor.PNG                |  Bin 0 -> 13457 bytes
 .../img/tutorial-sinks/project-structure-sinks.png |  Bin 0 -> 36216 bytes
 .../static/img/tutorial-sources/pe-overview.PNG    |  Bin 0 -> 29556 bytes
 .../website/static/img/tutorial-sources/pe-rdf.PNG |  Bin 0 -> 81915 bytes
 .../img/tutorial-sources/project-structure.PNG     |  Bin 0 -> 27916 bytes
 documentation/website/static/img/two.png           |  Bin 0 -> 16668 bytes
 documentation/website/static/img/zehnder.png       |  Bin 0 -> 420534 bytes
 .../website/static/js/code-block-buttons.js        |   47 +
 documentation/website/static/js/custom.js          |   17 +
 .../version-0.55.2/dev-guide-architecture.md       |   62 +
 .../version-0.55.2/dev-guide-environment.md        |  219 +
 .../version-0.55.2/dev-guide-introduction.md       |   53 +
 .../version-0.55.2/dev-guide-processor-sdk.md      |   12 +
 .../version-0.55.2/dev-guide-sink-sdk.md           |   12 +
 .../version-0.55.2/dev-guide-source-sdk.md         |   76 +
 .../dev-guide-tutorial-processors.md               |  478 +
 .../version-0.55.2/dev-guide-tutorial-sources.md   |  219 +
 .../version-0.55.2/faq-common-problems.md          |   60 +
 .../version-0.55.2/pipeline-elements.md            |   10 +
 .../versioned_docs/version-0.55.2/privacy.md       |   37 +
 .../version-0.55.2/user-guide-getting-started.md   |  223 +
 .../version-0.55.2/user-guide-installation.md      |   71 +
 .../version-0.55.2/user-guide-introduction.md      |   62 +
 .../user-guide-processing-elements.md              |  131 +
 .../user-guide-software-components.md              |  335 +
 .../version-0.55.2/user-guide-tour.md              |  288 +
 .../version-0.61.0/dev-guide-archetype.md          |  143 +
 .../version-0.61.0/dev-guide-architecture.md       |   62 +
 .../version-0.61.0/dev-guide-configuration.md      |   60 +
 .../version-0.61.0/dev-guide-environment.md        |  219 +
 .../version-0.61.0/dev-guide-event-model.md        |  142 +
 .../version-0.61.0/dev-guide-migration.md          |  193 +
 .../version-0.61.0/dev-guide-output-strategies.md  |  347 +
 .../version-0.61.0/dev-guide-processor-sdk.md      |   12 +
 .../version-0.61.0/dev-guide-sink-sdk.md           |   12 +
 .../versioned_docs/version-0.61.0/dev-guide-ssl.md |   36 +
 .../version-0.61.0/dev-guide-static-properties.md  |  265 +
 .../dev-guide-stream-requirements.md               |  179 +
 .../dev-guide-tutorial-processors.md               |  503 +
 .../version-0.61.0/dev-guide-tutorial-sources.md   |  283 +
 .../version-0.61.0/faq-common-problems.md          |   74 +
 .../version-0.61.0/pipeline-elements.md            |   10 +
 .../version-0.61.0/user-guide-installation.md      |   74 +
 .../user-guide-processing-elements.md              |  139 +
 .../user-guide-software-components.md              |  335 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   44 +
 .../documentation.md                               |   33 +
 .../documentation.md                               |   39 +
 .../documentation.md                               |   39 +
 .../documentation.md                               |   39 +
 .../documentation.md                               |   34 +
 .../documentation.md                               |   31 +
 .../documentation.md                               |   33 +
 .../documentation.md                               |   39 +
 .../documentation.md                               |   31 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   34 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   38 +
 .../documentation.md                               |   37 +
 .../documentation.md                               |   41 +
 .../documentation.md                               |   43 +
 .../documentation.md                               |   43 +
 .../documentation.md                               |   44 +
 .../documentation.md                               |   57 +
 .../documentation.md                               |   36 +
 .../documentation.md                               |   43 +
 .../documentation.md                               |   46 +
 .../documentation.md                               |   68 +
 .../documentation.md                               |   56 +
 .../documentation.md                               |   35 +
 .../documentation.md                               |   41 +
 .../documentation.md                               |   50 +
 .../documentation.md                               |   46 +
 .../documentation.md                               |   51 +
 .../version-0.62.0/pipeline-elements.md            |    8 +
 .../version-0.62.0/user-guide-tour.md              |  280 +
 .../version-0.63.0/dev-guide-archetype.md          |  143 +
 .../version-0.63.0/dev-guide-configuration.md      |   59 +
 .../version-0.63.0/dev-guide-environment.md        |  113 +
 .../dev-guide-tutorial-processors.md               |  500 +
 .../version-0.63.0/dev-guide-tutorial-sinks.md     |  247 +
 .../version-0.63.0/dev-guide-tutorial-sources.md   |  283 +
 .../version-0.63.0/user-guide-first-steps.md       |  215 +
 .../version-0.63.0/user-guide-installation.md      |  141 +
 .../version-0.64.0/dev-guide-archetype.md          |  153 +
 .../version-0.64.0/dev-guide-environment.md        |  113 +
 .../dev-guide-tutorial-processors.md               |  511 +
 .../version-0.64.0/dev-guide-tutorial-sinks.md     |  256 +
 .../version-0.64.0/dev-guide-tutorial-sources.md   |  283 +
 .../version-0.55.2-sidebars.json                   |   34 +
 .../version-0.61.0-sidebars.json                   |   48 +
 .../version-0.62.0-sidebars.json                   |   96 +
 .../version-0.63.0-sidebars.json                   |   95 +
 documentation/website/versions.json                |    7 +
 nginx_config/default.conf                          |   10 +
 nginx_config/nginx.conf                            |   37 +
 package.json                                       |   35 +
 website/css/bootstrap.min.css                      |    7 +
 website/css/buttons.css                            |   44 +
 website/css/custom.css                             |  331 +
 website/css/font-awesome.min.css                   |    4 +
 website/css/footer.css                             |   22 +
 website/css/nav.css                                |   72 +
 website/css/page-banner.css                        |   17 +
 website/css/responsive.css                         |  129 +
 website/css/team.css                               |   25 +
 website/download.ejs                               |  268 +
 website/features.ejs                               |  248 +
 website/fonts/FontAwesome.otf                      |  Bin 0 -> 134808 bytes
 website/fonts/fontawesome-webfont.eot              |  Bin 0 -> 165742 bytes
 website/fonts/fontawesome-webfont.svg              | 2671 ++++++
 website/fonts/fontawesome-webfont.ttf              |  Bin 0 -> 165548 bytes
 website/fonts/fontawesome-webfont.woff             |  Bin 0 -> 98024 bytes
 website/fonts/fontawesome-webfont.woff2            |  Bin 0 -> 77160 bytes
 website/getinvolved.ejs                            |   36 +
 website/images/screenshot.png                      |  Bin 0 -> 359634 bytes
 website/img/about/dutz.jpg                         |  Bin 0 -> 140407 bytes
 website/img/about/riemer.png                       |  Bin 0 -> 428115 bytes
 website/img/about/tex.png                          |  Bin 0 -> 401565 bytes
 website/img/about/wiener.png                       |  Bin 0 -> 249219 bytes
 website/img/about/zehnder.png                      |  Bin 0 -> 420534 bytes
 website/img/bg-img/bg-index.jpg                    |  Bin 0 -> 541592 bytes
 website/img/favicon/apple-touch-icon-114x114.png   |  Bin 0 -> 12731 bytes
 website/img/favicon/apple-touch-icon-120x120.png   |  Bin 0 -> 12215 bytes
 website/img/favicon/apple-touch-icon-144x144.png   |  Bin 0 -> 9514 bytes
 website/img/favicon/apple-touch-icon-152x152.png   |  Bin 0 -> 21383 bytes
 website/img/favicon/apple-touch-icon-57x57.png     |  Bin 0 -> 3780 bytes
 website/img/favicon/apple-touch-icon-60x60.png     |  Bin 0 -> 4004 bytes
 website/img/favicon/apple-touch-icon-72x72.png     |  Bin 0 -> 3230 bytes
 website/img/favicon/apple-touch-icon-76x76.png     |  Bin 0 -> 5680 bytes
 website/img/favicon/favicon-128.png                |  Bin 0 -> 3329 bytes
 website/img/favicon/favicon-16x16.png              |  Bin 0 -> 471 bytes
 website/img/favicon/favicon-196x196.png            |  Bin 0 -> 34049 bytes
 website/img/favicon/favicon-32x32.png              |  Bin 0 -> 864 bytes
 website/img/favicon/favicon-96x96.png              |  Bin 0 -> 4246 bytes
 website/img/favicon/favicon.ico                    |  Bin 0 -> 34494 bytes
 website/img/favicon/mstile-144x144.png             |  Bin 0 -> 9514 bytes
 website/img/favicon/mstile-150x150.png             |  Bin 0 -> 51834 bytes
 website/img/favicon/mstile-310x150.png             |  Bin 0 -> 90271 bytes
 website/img/favicon/mstile-310x310.png             |  Bin 0 -> 214934 bytes
 website/img/favicon/mstile-70x70.png               |  Bin 0 -> 3329 bytes
 website/img/integrations/Aggregation_Icon_HQ.png   |  Bin 0 -> 1854 bytes
 website/img/integrations/And_Icon_HQ.png           |  Bin 0 -> 2516 bytes
 website/img/integrations/Counter_Icon_HQ.png       |  Bin 0 -> 4738 bytes
 website/img/integrations/NeuralNetwork.png         |  Bin 0 -> 4515 bytes
 .../img/integrations/Numerical_Filter_Icon_HQ.png  |  Bin 0 -> 3172 bytes
 website/img/integrations/alarm_light.png           |  Bin 0 -> 6272 bytes
 website/img/integrations/couchdb_icon.png          |  Bin 0 -> 48173 bytes
 website/img/integrations/elasticsearch_icon.png    |  Bin 0 -> 20411 bytes
 website/img/integrations/email.png                 |  Bin 0 -> 8525 bytes
 website/img/integrations/field-hasher-icon.png     |  Bin 0 -> 19799 bytes
 website/img/integrations/field_converter.png       |  Bin 0 -> 11482 bytes
 website/img/integrations/file_icon.png             |  Bin 0 -> 2031 bytes
 website/img/integrations/gauge_icon.png            |  Bin 0 -> 19795 bytes
 website/img/integrations/hadoop-icon.png           |  Bin 0 -> 35282 bytes
 website/img/integrations/html_icon.png             |  Bin 0 -> 7429 bytes
 website/img/integrations/increase-icon.png         |  Bin 0 -> 18764 bytes
 website/img/integrations/influx.png                |  Bin 0 -> 9982 bytes
 website/img/integrations/jms_logo.png              |  Bin 0 -> 25474 bytes
 website/img/integrations/kafka_logo.png            |  Bin 0 -> 18630 bytes
 website/img/integrations/math-icon.png             |  Bin 0 -> 22150 bytes
 website/img/integrations/mysql.png                 |  Bin 0 -> 6477 bytes
 website/img/integrations/postgres.png              |  Bin 0 -> 8890 bytes
 website/img/integrations/rabbitmq-icon.png         |  Bin 0 -> 21556 bytes
 website/img/integrations/slack-icon.png            |  Bin 0 -> 9767 bytes
 website/img/integrations/statistics-icon.png       |  Bin 0 -> 23118 bytes
 website/img/open_source/icon-docker.png            |  Bin 0 -> 33551 bytes
 website/img/open_source/icon-docs.png              |  Bin 0 -> 26773 bytes
 website/img/open_source/icon-github.png            |  Bin 0 -> 41956 bytes
 website/img/open_source/icon-mail.png              |  Bin 0 -> 29649 bytes
 website/img/open_source/icon-slack.png             |  Bin 0 -> 5067 bytes
 website/img/open_source/icon-stackoverflow.png     |  Bin 0 -> 46849 bytes
 website/img/open_source/icon-twitter.png           |  Bin 0 -> 4298 bytes
 website/img/screenshots/connect-customize.png      |  Bin 0 -> 79922 bytes
 website/img/screenshots/connect-marketplace.png    |  Bin 0 -> 146874 bytes
 website/img/screenshots/live-dashboard.png         |  Bin 0 -> 87581 bytes
 website/img/screenshots/pipeline-customization.png |  Bin 0 -> 112000 bytes
 website/img/screenshots/pipeline-details.png       |  Bin 0 -> 82822 bytes
 website/img/screenshots/pipeline-editor-2.png      |  Bin 0 -> 137687 bytes
 .../img/screenshots/pipeline-editor-sources.png    |  Bin 0 -> 95393 bytes
 website/img/screenshots/pipeline-editor.png        |  Bin 0 -> 135643 bytes
 .../screenshots/pipeline-element-marketplace.png   |  Bin 0 -> 76371 bytes
 website/img/screenshots/pipeline-overview.png      |  Bin 0 -> 84199 bytes
 website/img/screenshots/start-pipeline.png         |  Bin 0 -> 84681 bytes
 website/img/slideshow/slideshow-config.png         |  Bin 0 -> 160919 bytes
 website/img/slideshow/slideshow-connect.png        |  Bin 0 -> 198388 bytes
 website/img/slideshow/slideshow-dashboard.png      |  Bin 0 -> 179762 bytes
 website/img/slideshow/slideshow-home.png           |  Bin 0 -> 217760 bytes
 website/img/slideshow/slideshow-installation.png   |  Bin 0 -> 131776 bytes
 website/img/slideshow/slideshow-pipeline-view.png  |  Bin 0 -> 102351 bytes
 website/img/slideshow/slideshow-pipeline.png       |  Bin 0 -> 211773 bytes
 website/img/slideshow/slideshow-recommendation.png |  Bin 0 -> 199717 bytes
 website/img/sp-logo-color.png                      |  Bin 0 -> 16928 bytes
 website/img/sp-logo-white-right.png                |  Bin 0 -> 21453 bytes
 website/img/streampipes-screenshot.png             |  Bin 0 -> 124034 bytes
 .../streampipes-high-level-architecture-blue.png   |  Bin 0 -> 338665 bytes
 .../streampipes-high-level-architecture.png        |  Bin 0 -> 338816 bytes
 website/img/technology/streampipes-tech-stack.png  |  Bin 0 -> 149139 bytes
 website/img/usecases/production-line.png           |  Bin 0 -> 479552 bytes
 website/index.ejs                                  |   23 +
 website/js/bootstrap.min.js                        |    7 +
 website/js/jquery-2.2.4.min.js                     |    4 +
 website/js/scripts.js                              |   20 +
 website/mailinglists.ejs                           |   85 +
 website/media.ejs                                  |   87 +
 website/meetus.ejs                                 |   46 +
 website/partials/_breadcumb.ejs                    |   11 +
 website/partials/_footer.ejs                       |   22 +
 website/partials/_index-architecture.ejs           |   22 +
 website/partials/_index-description.ejs            |   74 +
 website/partials/_index-support-links.ejs          |   50 +
 website/partials/_index-teaser.ejs                 |   57 +
 website/partials/_meta.ejs                         |   37 +
 website/partials/_nav.ejs                          |   95 +
 website/team.ejs                                   |   85 +
 website/technology.ejs                             |   56 +
 website/usecases.ejs                               |   86 +
 623 files changed, 34042 insertions(+)

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..e6635f7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+.DS_Store
+
+node_modules
+
+lib/core/metadata.js
+lib/core/MetadataBlog.js
+
+website/translated_docs
+website/build/
+website/yarn.lock
+website/node_modules
+website/i18n/*
+!website/i18n/en.json
+
+.idea/
+
+build/
+documentation/website/build/
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..4f22020
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,9 @@
+FROM nginx
+
+COPY documentation/website/build/streampipes-docs/ /usr/share/nginx/html/docs
+COPY build/ /usr/share/nginx/html
+
+COPY nginx_config/nginx.conf /etc/nginx/nginx.conf
+COPY nginx_config/default.conf /etc/nginx/conf.d/default.conf
+
+RUN chown -R nginx:nginx /usr/share/nginx/html/
diff --git a/documentation/docs/.Rhistory b/documentation/docs/.Rhistory
new file mode 100644
index 0000000..e69de29
diff --git a/documentation/docs/dev-guide-archetype.md b/documentation/docs/dev-guide-archetype.md
new file mode 100644
index 0000000..0832356
--- /dev/null
+++ b/documentation/docs/dev-guide-archetype.md
@@ -0,0 +1,142 @@
+---
+id: dev-guide-archetype
+title: Start Developing
+sidebar_label: Start Developing
+---
+
+In this tutorial we explain how you can use the Maven archetypes to develop your own StreamPipes processors and sinks.
+We use IntelliJ in this tutorial, but it works with any IDE of your choice.
+
+## Prerequisites
+You need to have Maven installed, further you need an up and running StreamPipes installation on your development computer.
+To ease the configuration of environment variables, we use the IntelliJ [env Plugin](https://plugins.jetbrains.com/plugin/7861-envfile).
+Install this in IntelliJ. The development also works without the plugin, then you have to set the environment variables manually instead of using the env configuration file.
+
+## Create Project
+To create a new project, we provide multiple Maven Archteypes.
+Currently, we have archetypes for the JVM and Flink wrappers, each for processors and sinks.
+The commands required to create a new pipeline element project can be found below. Make sure that you select a version compatible with your StreamPipes installation.
+Copy the command into your terminal to create a new project.
+The project will be created in the current folder.
+First, the ``groupId`` of the resulting Maven artifact must be set.
+We use ``groupId``: ``org.example`` and ``artifactId``: ``ExampleProcessor``.
+You can keep the default values for the other settings, confirm them by hitting enter.
+Now, a new folder with the name ``ExampleProcessor`` is generated.
+
+
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-jvm  \
+  -DarchetypeVersion={sp.version}
+```
+<details class="info">
+    <summary>Select: [Processors / Sinks] [JVM / Flink]</summary>
+
+## Processors JVM
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-jvm  \
+  -DarchetypeVersion=0.64.0
+```
+
+## Processors Flink
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-processors-flink  \
+  -DarchetypeVersion=0.64.0
+```
+
+## Sinks JVM
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-sinks-jvm  \
+  -DarchetypeVersion=0.64.0
+```
+
+## Sinks Flink
+```bash
+mvn archetype:generate                              	 	     \
+  -DarchetypeGroupId=org.streampipes          			         \
+  -DarchetypeArtifactId=streampipes-archetype-pe-sinks-flink  \
+  -DarchetypeVersion=0.64.0
+```
+</details>
+
+
+## Edit Processor
+Open the project in your IDE.
+If everything worked, the structure should look similar to the following image.
+The *config* package contains all the configuration parameters of your processors / sinks.
+In the *main* package, it is defined which processors / sinks you want to activate and the *pe.processor.example* package contains three classes with the application logic.
+For details, have a look at the other parts of the Developer Guide, where these classes are explained in more depth.
+
+<img src="/img/archetype/project_structure.png" width="30%" alt="Project Structure">
+
+Open the class *Example* and edit the ``onEvent`` method to print the incoming event, log it to the console and send it to the next component without changing it.
+
+```java
+@Override
+public void onEvent(Event event, SpOutputCollector collector) {
+    // Print the incoming event on the console
+    System.out.println(event);
+
+    // Hand the incoming event to the output collector without changing it.
+    collector.onEvent(event);
+}
+```
+
+## Start Processor
+Before the processor can be started, you need to edit the *env* file in the *development* folder.
+Replace all local hosts in this file with the IP address or DNS name of your computer.
+This is relevant to make the mapping between the services running in Docker and your component running in the local IDE.
+After all changes are made, this file is used by the envfile plugin to provide configuration parameters to the pipeline element.
+Alternatively, environment variables can also be set on your host or IDE.
+Now start the project by clicking on **(Run -> Edit Configuration)**.
+Add a new configuration in the Configuration menu by clicking on the + sign and select **Application**.
+Name the configuration *ExampleProcessor* and select the *Init* class as the 'Main class'.
+Then set *ExampleProcessor* in 'Use classpath of module'.
+
+
+As the last step, switch to the tab *EnvFile* and load the env file.
+Click on 'Enable EnvFile' to activate it and add the just edited env file by clicking on the + sign.
+Save all the changes by clicking *Apply*.
+Now you can start the processor.
+
+<div class="my-carousel">
+    <img src="/img/archetype/run_configuration.png" alt="Configuration View">
+    <img src="/img/archetype/run_env_configuration.png" alt="Environment Configuration View">
+</div>
+
+To check if the service is up and running, open the browser on *'localhost:6666'*. The machine-readable description of the processor should be visible as shown below.
+
+<img src="/img/archetype/endpoint.png" width="90%" alt="Project Structure">
+
+
+<div class="admonition error">
+<div class="admonition-title">Common Problems</div>
+<p>
+If the service description is not shown on 'localhost:6666', you might have to change the port address.
+This needs to be done in the configuration of your service, further explained in the configurations part of the developer guide.
+
+If the service does not show up in the StreamPipes installation menu, click on 'MANAGE ENDPOINTS' and add 'http://<span></span>YOUR_IP_OR_DNS_NAME:6666'.
+Use the IP or DNS name you provided in the env file.
+After adding the endpoint, a new processor with the name *Example* should show up.
+</p>
+</div>
+
+Now you can go to StreamPipes.
+Your new processor *'Example'* should now show up in the installation menu.
+Install it, then switch to the pipeline view and create a simple pipeline that makes use of your newly created processor.
+In case you opened the StreamPipes installation for the first time, it should have been automatically installed during the setup process.
+
+<img src="/img/archetype/example_pipeline.png" width="80%" alt="Project Structure">
+
+Start this pipeline.
+Now you should see logging messages in your console and, once you've created a visualisation, you can also see the resulting events of your component in StreamPipes.
+
+Congratulations, you have just created your first processor!
+From here on you can start experimenting and implement your own algorithms.
diff --git a/documentation/docs/dev-guide-architecture.md b/documentation/docs/dev-guide-architecture.md
new file mode 100644
index 0000000..1ad0869
--- /dev/null
+++ b/documentation/docs/dev-guide-architecture.md
@@ -0,0 +1,61 @@
+---
+id: dev-guide-architecture
+title: Architecture
+sidebar_label: Architecture
+---
+
+The following picture illustrates the high-level architecture of StreamPipes:
+
+<img src="/img/architecture/high-level-architecture.png" alt="High Level Architecture of StreamPipes">
+
+Users mainly interact (besides other UI components) with the _Pipeline Editor_ to create stream processing pipelines based on data streams, data processors and data sinks.
+These reusable pipeline elements are provided by self-contained _pipeline element containers_, each of them having a semantic description that specifies their characteristics (e.g., input, output and required user input for data processors).
+Each pipeline element container has a REST endpoint that provides these characteristics as a JSON-LD document.
+
+Pipeline element containers are built using one of several provided _wrappers_.
+Wrappers abstract from the underlying runtime stream processing framework.
+Currently, the StreamPipes framework provides wrappers for Apache Flink, Esper and algorithms running directly on the JVM.
+
+The _pipeline manager_ manages the definition and execution of pipelines.
+When creating pipelines, the manager continuously matches the pipeline against its semantic description and provides user guidance in form of recommendations.
+Once a pipeline is started, the pipeline manager invokes the corresponding pipeline element containers.
+The container prepares the actual execution logic and submits the program to the underlying execution engine, e.g., the program is deployed in the Apache Flink cluster.
+
+Pipeline elements exchange data using one or more message brokers and protocols (e.g., Kafka or MQTT).
+StreamPipes does not rely on a specific broker or message format, but negotiates suitable brokers based on the capabilities of connected pipeline elements.
+
+Thus, StreamPipes provides a higher-level abstraction of existing stream processing technology by leveraging domain experts to create streaming analytics pipelines in a self-service manner.
+
+## Semantic description
+Pipeline elements in StreamPipes are meant to be reusable:
+
+* Data processors and data sink are generic (or domain-specific) elements that express their requirements and are able to operate on any stream that satisfies these requirements.
+* Data processors and data sinks can be manually configured by offering possible configuration parameters which users can individually define when creating pipelines.
+* Data streams can be connected to any data processor or data sink that matches the capabilities of the stream.
+
+When users create pipelines by connecting a data stream with a data processor (or further processors), the pipeline manager _matches_ the input stream of a data processor against its requirements.
+This matching is performed based on the _semantic description_ of each element.
+The semantic description (technically an RDF graph serialized as JSON-LD) can be best understood by seeing it as an envelope around a pipeline element.
+It only provides metadata information, while we don't rely on any RDF at runtime for exchanging events between pipeline elements.
+While RDF-based metadata ensures good understanding of stream capabilities, lightweight event formats at runtime (such as JSON or Thrift) ensure fast processing of events.
+
+Let's look at an example stream that produces a continuous stream of vehicle positions as illustrated below:
+
+<img src="/img/architecture/semantic-description-stream.png" alt="Semantic description of data streams">
+
+While the runtime layer produces plain JSON by submitting actual values of the position and the vehicle's plate number, the description layer describes various characteristics of the stream:
+For instance, it defines the event schema (including, besides the data type and the runtime name of each property also a more fine-grained meaning of the property), quality aspects (e.g., the measurement unit of a property or the frequency) and the grounding (e.g., the format used at runtime and the communication protocol used for transmitting events).
+
+The same accounts for data processors and data sinks:
+
+<img src="/img/architecture/semantic-description-processor.png" alt="Semantic description of data processor">
+
+Data processors (and, with some differences, data sinks) are annotated by providing metadata information on their required input and output.
+For instance, we can define minimum schema requirements (such as geospatial coordinates that need to be provided by any stream that is connected to a processor), but also required (minimum or maximum) quality levels and supported transport protocols and formats.
+In addition, required configuration parameters users can define during the pipeline definition process are provided by the semantic description.
+
+Once new pipeline elements are imported into StreamPipes, we store all information provided by the description layer in a central repository and use this information to guide users through the pipeline definition process.
+
+Don't worry - you will never be required to model RDF by yourself.
+Our SDK provides convenience methods that help creating the description automatically.
+
diff --git a/documentation/docs/dev-guide-configuration.md b/documentation/docs/dev-guide-configuration.md
new file mode 100644
index 0000000..cd140ef
--- /dev/null
+++ b/documentation/docs/dev-guide-configuration.md
@@ -0,0 +1,58 @@
+---
+id: dev-guide-configuration
+title: Configuration
+sidebar_label: Configuration
+---
+
+On this page we explain how the StreamPipes configuration works.
+StreamPipes allows the individual services (pipeline element containers and third-party services) to store configuration parameters in a distributed key-value store.
+This has the advantage that individual services do not need to store any configurations on the local file system, enabling us to run containers anywhere.
+As a key-value store we use [Consul](https://www.consul.io/), which is an essential service for all our services.
+
+<img src="/img/configuration/consul.png" width="50%" alt="Semantic description of data processor">
+
+
+## Edit Configurations
+All services in StreamPipes can have configuration parameters.
+You can either change them in the consul user interface (which is by default running on port 8500) or directly in the StreamPipes Configurations Page.
+Once a new pipeline element container is started, it is registered in Consul and the parameters can be edited in the configuration page, as shown below.
+To store changes in Consul, the update button must be clicked.
+
+<div class="my-carousel">
+    <img src="/img/configuration/configuration_1.png" alt="Configuration View">
+</div>
+
+## Configuration for Developers
+We provide a Configurations API for the use of configuration parameters in your services.
+Each processing element project has a “config” package [[Example]](https://github.com/streampipes/streampipes-pipeline-elements/tree/dev/streampipes-sinks-internal-jvm/src/main/java/org/streampipes/sinks/internal/jvm/config).
+This package usually contains two classes.
+One containing unique keys for the configuration values and one containing the getter and setter methods to access these values.
+For the naming of configuration keys, we recommend to use “SP” as a prefix.
+As we explain later, it is possible to set default configurations as environment variables, this prefix makes them unique on your server.
+A configuration entry needs a unique config key. For this key, a value can be specified containing the configuration, like for example the port number of the service.
+For each configuration, a description explaining the parameter can be provided, further the data type must be specified and whether it is a password or not.
+Below, the schema of a configuration item is shown on the left and an example of a port configuration on the right.
+
+<img src="/img/configuration/config_key.png" width="80%" alt="Semantic description of data processor">
+
+As a developer, you can add as many new configurations to services as you wish, but there are some that are required for all processing element containers.
+Those are **the host**, **the port**, and **the name** of the service.
+
+## Default Values
+You can provide default values for the configurations, which are used when a configuration is read for the first time.
+The first option is to register a configuration parameter in the Config class.
+This is a fallback value, which is used if nothing else is defined.
+Since this value is static, we offer a second option.
+It is possible to provide a default value by setting an environment variable.
+In this case, the convention is that the key of a configuration parameter must be used as the environment variable.
+Now, this value is used instead of the value defined in the Config class.
+During development, the configuration values often need to be changed for debugging purposes, therefore we provide an .env file in all processing element projects and archetypes.
+This file can be used by your IDE to set the environment variables. (e.g., [Intellij Plugin](https://plugins.jetbrains.com/plugin/7861-envfile))
+When you need to change the variable at runtime, you can do this in the StreamPipes configurations as explained before.
+Those changes take effect immediately without the need of a container restart.
+
+<div class="admonition warning">
+<div class="admonition-title">Installed pipeline elements</div>
+<p>Be cautious, when the configuration is used in the semantic description of a processing element which is already installed in StreamPipes, you have to reload this element in StreamPipes (my elements -> reload).
+   In addition, changes might affect already running pipelines.</p>
+</div>
diff --git a/documentation/docs/dev-guide-environment.md b/documentation/docs/dev-guide-environment.md
new file mode 100644
index 0000000..882def1
--- /dev/null
+++ b/documentation/docs/dev-guide-environment.md
@@ -0,0 +1,112 @@
+---
+id: dev-guide-development-environment
+title: Development Environment
+sidebar_label: Development Environment
+---
+
+In this section, we describe our recommended minimum setup for locally setting up a development instance of StreamPipes needed to develop, run and test new pipeline elements.
+
+## IDE & required dev tools
+StreamPipes does not have specific requirements on the IDE - so feel free to choose the IDE of your choice.
+The only requirements in terms of development tools are that you have Java 8 and Maven installed.
+
+## Docker-based local StreamPipes instance
+In order to quickly test developed pipeline elements without needing to install all services required by StreamPipes, we provide a CLI tool that allows you to selectively start StreamPipes components.
+The CLI tool allows you to switch between several templates (based on docker-compose) depending on your role. For instance, if you are developing a pipeline element, use the template ``pe-developer``. This will start backend and ui components in a Docker container, while you can easily test your pipeline element in your IDE.
+
+For now, we refer to the Github Readme for instructions on how to use the CLI tool: [https://github.com/streampipes/streampipes-cli](https://github.com/streampipes/streampipes-cli)
+
+## Starter projects
+
+Now, once you've started the development instance, you are ready to develop your very first pipeline element.
+Instead of starting from scratch, we recommend using our provided maven archetypes:
+
+### Maven archetypes
+
+Create the Maven archetype as described in the [Getting Started](dev-guide-archetype) guide.
+
+### Starting from scratch
+
+In order to develop a new pipeline element from scratch, you need to create a new Maven project and import the following dependencies:
+
+<details class="info">
+<summary>pom.xml</summary>
+```
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-container-standalone</artifactId>
+    <version>0.64.0</version>
+</dependency>
+
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-sdk</artifactId>
+    <version>0.64.0</version>
+</dependency>
+
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-vocabulary</artifactId>
+    <version>0.64.0</version>
+</dependency>
+
+<!-- This dependency needs to be imported if you plan to develop a new data processor or data sink using the Apache Flink wrapper -->
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-wrapper-flink</artifactId>
+    <version>0.64.0</version>
+</dependency>
+
+<!-- This dependency needs to be imported if you plan to develop a new data processor or data sink which is running directly on the JVM -->
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-wrapper-standalone</artifactId>
+    <version>0.64.0</version>
+</dependency>
+
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-dataformat-json</artifactId>
+    <version>0.64.0</version>
+</dependency>
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-dataformat-smile</artifactId>
+     <version>0.64.0</version>
+</dependency>
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-dataformat-cbor</artifactId>
+     <version>0.64.0</version>
+</dependency>
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-dataformat-fst</artifactId>
+     <version>0.64.0</version>
+</dependency>
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-messaging-jms</artifactId>
+     <version>0.64.0</version>
+</dependency>
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-messaging-kafka</artifactId>
+     <version>0.64.0</version>
+</dependency>
+```
+</details>
+
+The following three dependencies are mandatory:
+
+* `streampipes-container-standalone`, defines that we are going to create a new pipeline element where the description will be accessible through an embedded web server.
+* `streampipes-sdk` imports the SDK which provides many convenience functions to create new pipeline elements.
+* `streampipes-vocabulary` imports various RDF vocabularies which are used by the SDK to auto-generate the semantic description of pipeline elements.
+
+The following two dependencies might be optional depending on the pipeline element type you plan to create:
+
+*  `streampipes-wrapper-flink` should be used in case you plan to connect a new data processor or data sink that uses Apache Flink for processing events at runtime.
+*  `streampipes-wrapper-standalone` should be used in case you plan to connect a new data processor or data sink that does not use an external processing engine. Events are directly processed in a single-host fashion.
+
+
+Finally, this dependency will provide abstract classes to define data sources and streams.
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-event-model.md b/documentation/docs/dev-guide-event-model.md
new file mode 100644
index 0000000..cc4ade6
--- /dev/null
+++ b/documentation/docs/dev-guide-event-model.md
@@ -0,0 +1,141 @@
+---
+id: dev-guide-event-model
+title: SDK Guide: Event Model
+sidebar_label: Event Model
+---
+
+## Introduction
+
+The 0.61.0 release of StreamPipes introduces a new event model that replaces the ``Map`` representation of events at runtime. This guide explains the usage of the new event model to manipulate runtime events for data processors and data sinks.
+
+## Reference
+
+This guide assumes that you are already familiar with the basic setup of [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+### Property Selectors
+
+In most cases, fields that are subject to be transformed by pipeline elements are provided by the assigned ``MappingProperty`` (see the guide on [static properties](dev-guide-static-properties.md)).
+
+Mapping properties return a ``PropertySelector`` that identifies a field based on (i) the **streamIndex** and (ii) the runtime name of the field.
+Let's assume we have an event with the following structure:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "sensor1",
+    "running" : true,
+    "location" : {"latitude" : 34.4, "longitude" : -47},
+    "lastValues" : [45, 22, 21]
+}
+```
+
+In addition, we assume that a data processor exists (with one input node) that converts the temperature value (measured in degrees Celsius) to a value in degrees Fahrenheit.
+In this case, a mapping property (selected by the pipeline developer in the StreamPipes UI) would link to the ``temperature`` field of the event.
+
+The mapping property value will be the ``PropertySelector`` of the temperature value, which looks as follows:
+
+```
+s0::temperature
+```
+
+``s0`` identifies the stream (in this case, only one input stream exists, but as data processors might require more than one input stream, a stream identifier is required), while the appendix identifies the runtime name.
+
+Note: If you add a new field to an input event, you don't need to provide the selector, you can just assign the runtime name as defined by the [output strategy](dev-guide-output-strategies.md).
+
+### Reading Fields
+
+You can get a field from an event by providing the corresponding selector:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  PrimitiveField temperatureField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsPrimitive();
+  }
+
+```
+
+Similarly, if your mapping property links to a nested property, use
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  NestedField nestedField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsNested();
+  }
+
+```
+
+and for a list-based field:
+
+```java
+
+@Override
+  public void onEvent(Event event, SpOutputCollector out) {
+
+  ListField listField = event.getFieldBySelector(PROPERTY_SELECTOR).getAsList();
+  }
+
+```
+
+### Parsing Fields
+
+#### Primitive Fields
+
+A ``PrimitiveField`` contains convenience methods to directly cast a field to the target datatype:
+
+```java
+
+// parse the value as a float datatype
+Float temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsFloat();
+
+// or do the same with a double datatype
+Double temperatureValue = event.getFieldBySelector(temperatureSelector).getAsPrimitive().getAsDouble();
+
+// extracting a string
+String deviceId = event.getFieldBySelector(deviceIdSelector).getAsPrimitive().getAsString();
+
+// this also works for extracting fields from nested fields:
+Double latitude = event.getFieldBySelector(latitudeSelector).getAsPrimitive().getAsDouble();
+
+// extracting boolean values
+Boolean running = event.getFieldBySelector(runningSelector).getAsPrimitive().getAsBoolean();
+```
+
+In rare cases, you might want to receive a field directly based on the runtime name as follows:
+
+```java
+Double temperature = event.getFieldByRuntimeName("temperature").getAsPrimitive().getAsDouble();
+```
+
+#### List Fields
+
+Lists can also be retrieved by providing the corresponding selector and can automatically be parsed to a list of primitive datatypes:
+
+```java
+
+List<Integer> lastValues = event.getFieldBySelector(lastValueSelector).getAsList().parseAsSimpleType(Integer.class);
+
+```
+
+(coming soon: parsing complex lists)
+
+
+### Adding/Updating Fields
+
+Primitive fields can easily be added to an event by providing the runtime name and the object:
+
+```java
+
+    // add a primitive field with runtime name "city" and value "Karlsruhe"
+    event.addField("city", "Karlsruhe");
+
+    // remove the field "temperature" from the event
+    event.removeFieldBySelector(temperatureSelector);
+
+    // add a new field 
+    event.addField("fahrenheit", 48);
+```
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-introduction.md b/documentation/docs/dev-guide-introduction.md
new file mode 100644
index 0000000..465eb34
--- /dev/null
+++ b/documentation/docs/dev-guide-introduction.md
@@ -0,0 +1,52 @@
+---
+id: dev-guide-introduction
+title: Introduction
+sidebar_label: Introduction
+---
+
+StreamPipes is an open platform which can be easily extended at runtime by adding new data streams, data processors and data sinks.
+When developing new components, usually multiple elements, called _pipeline elements_, are bundled into a self-contained _pipeline element container_. This container is deployed as a standalone microservice. This service is self-descriptive and exposes its description (see below for a detailed explanation) to the StreamPipes management backend. Once the service is deployed, it can be installed using the StreamPipes UI and all elements provided by this service are ready to be used as part [...]
+
+To ease the extension of StreamPipes, we provide a powerful Software Development Kit (SDK) that allows you to define new pipeline elements for your StreamPipes instance.
+
+## Concepts
+
+In this section, we briefly describe the main concepts of StreamPipes. Although it might give an abstract overview, we are sure that many concepts will be more clear once you've created your very first pipeline element yourself. So check out our tutorials!
+
+### Data Stream
+
+A _Data Stream_ is the main concept to describe the source of a pipeline. _DataStreams_ consist of
+an RDF _description_ (which will be generated automatically when using the SDK) and a runtime
+_implementation_. The description includes information on the _schema_ of a data stream, e.g., measurement properties the payload of a stream provides.
+Furthermore, the description contains information on the _grounding_, such as the transport format (e.g., JSON) and transport protocol (e.g., MQTT or Kafka).
+One or more data streams are assigned to a _Data Source_ to improve discovery of existing streams.
+
+### Data Processor
+_Data Processors_ transform one or more input event streams to an output event stream. Data processors can be stateless (e.g., filter operations on every event of an input stream) or stateful (e.g., time-based aggregations using sliding windows).
+Similar to data streams, processors consist of an RDF _description_ and a corresponding _implementation_. The description is being used by the StreamPipes backend in order to determine the compatibility of a data processor and an input event stream and includes information on the required minimum event schema as well as required user input and the definition of the output event stream.
+
+The implementation of a data processor can be defined using a set of provided _runtime wrappers_. These wrappers define where computation logic actually takes place once a pipeline was started. We currently provide runtime wrappers for various Big Data processing engines (e.g., Apache Flink) and lightweight standalone processors.
+
+### Data Sink
+The concept of _Data Sinks_ is very similar to the concept of data processors with the exception that sinks do not produce any output streams.
+Therefore, sinks are used in StreamPipes to mark the end of a pipeline and reflect 3rd party applications, notifications or dashboard components.
+
+### Static Property
+Some data processors or data sinks might require input from users when pipelines are created using these elements.
+For instance, a generic filter component might require information on the filter operation and a threshold value.
+Such required user input can be modeled by defining _static properties_. Static properties can be defined in many ways, e.g., plain text input, selections (e.g., radio buttons) or can be linked to separately stored domain knowledge.
+The SDK contains many convenient functions that help you define static properties.
+
+### Output Strategy
+As mentioned above, data processors also define the output event schema. However, as data processors in StreamPipes are often generic and can therefore be linked to any event stream that matches the input requirement of a data processor, the exact output schema is not known in the development phase when a data processor is defined.
+Therefore, data processors define their output using _output strategies_. Such strategies describe the transformation process, i.e., how an input stream is transformed to an output stream.
+Multiple pre-defined output strategies exist that you can choose depending on the behaviour of a data processor.
+For instance, the output schema of a filter component is usually similar to the input schema, so you would use a _KeepOutputStrategy_.
+On the other hand, an enrichment component usually adds additional properties to an input schema - this can be defined using a _AppendOutputStrategy_.
+Sometimes you want to let the user define the output schema. In this case, a _CustomOutputStrategy_ can be defined.
+
+## Clients
+
+As stated in the beginning, pipeline element containers are deployed as self-contained microservices. The client type describes the environment this service is running in.
+Currently supported clients are _standalone_, which defines a standalone service that contains both the description and implementation part (which is often submitted to a computing cluster prior to pipeline execution) in addition to an embedded Jetty web server which creates a fat jar file, and _embedded_, which creates a war file that can be imported into an existing application server.
+
diff --git a/documentation/docs/dev-guide-migration.md b/documentation/docs/dev-guide-migration.md
new file mode 100644
index 0000000..7f5d385
--- /dev/null
+++ b/documentation/docs/dev-guide-migration.md
@@ -0,0 +1,192 @@
+---
+id: dev-guide-migration
+title: Migration Guide v0.61
+sidebar_label: Migration Guide
+---
+
+Version 0.61.0 of StreamPipes comes with an improved event model. This model makes it easier to support more complex data streams (e.g., streams containing nested properties and lists) and includes features such as automatically resolving conflicts when merging two event streams.
+
+If you are only using the pipeline elements that are included in StreamPipes, you only need to update the element description (My Elements -> Update).
+However, if you've already developed your own pipeline elements, some code changes are required to make your elements work with versions >= 0.61.0.
+
+<div class="admonition info">
+<div class="admonition-title">Don't be afraid!</div>
+<p>Although this guide may look long and complicated, migrating pipeline elements is quite simple. Once you've understood how the new event model works, you'll be able to migrate an element within just a few minutes.</p>
+</div>
+
+## Migrating Data Processors
+
+### JVM Wrapper
+
+#### Engine Class
+
+1. Make the class **implement** ``EventProcessor`` instead of **extending** ``StandaloneEventProcessorEngine``
+
+```java
+// old
+public class MyProcessor extends StandaloneEventProcessorEngine<MyProcessorParameters> { ... }
+
+// new
+public class MyProcessor implements EventProcessor<MyProcessorParameters>{ ... }
+```
+
+2. Change the signature of the ``onInvocation`` method:
+
+```java
+// old
+  @Override
+  public void onInvocation(MyProcessorParameters params, DataProcessorInvocation dataProcessorInvocation) {
+
+  }
+
+// new
+  @Override
+  public void onInvocation(MyProcessorParameters params, SpOutputCollector spOutputCollector, EventProcessorRuntimeContext runtimeContext) throws SpRuntimeException {
+
+  }
+```
+
+3. Change the signature of the ``onEvent`` method:
+
+```java
+// old
+  @Override
+  public void onEvent(Map<String, Object> in, String s, SpOutputCollector out) {
+
+  }
+
+// new
+  @Override
+  public void onEvent(Event in, SpOutputCollector out) throws SpRuntimeException {
+
+  }
+```
+
+4. Refactor the ``onEvent`` method to the new event model:
+
+* Replace all ``collector.onEvent()`` calls to the collector with ``collector.collect()``
+* Replace all fields that are accessed from a **MappingProperty** with the new **FieldSelector** from the Event class, e.g.:
+
+```java
+// old
+  @Override
+  public void onEvent(Map<String, Object> in, String s, SpOutputCollector out) {
+     String value = String.valueOf(in.get(valueField));
+  }
+
+// new
+  @Override
+  public void onEvent(Event in, SpOutputCollector out) throws SpRuntimeException {
+    String value = in.getFieldBySelector(valueField).getAsPrimitive().getAsString();
+  }
+```
+
+See the documentation on the event class for further details.
+
+5. Forward an ``Event`` instead of a ``Map`` to the collector. If needed, create a new instance of the ``Event`` class.
+
+6. Adapt any modifications of the input event to the new API, e.g.:
+
+```java
+// old
+  @Override
+  public void onEvent(Map<String, Object> in, String s, SpOutputCollector out) {
+     in.put("new", "a new field");
+     out.onEvent(in);
+  }
+
+// new
+  @Override
+  public void onEvent(Event in, SpOutputCollector out) throws SpRuntimeException {
+    in.addField("new", "a new field");
+    out.collect(in);
+  }
+```
+
+#### Controller
+
+1. In the ``onInvocation`` method, use a method reference instead of the lambda expression as return type:
+
+```java
+// old
+return new ConfiguredEventProcessor<>(params, () -> new MyProcessor(params));
+
+// new
+return new ConfiguredEventProcessor<>(params, MyProcessor::new);
+```
+
+2. Change the signature of the ``onInvocation`` method:
+
+```java
+// old
+  @Override
+  public ConfiguredEventProcessor<MyParameters> onInvocation(DataProcessorInvocation graph) { ... }
+
+// new
+  @Override
+  public ConfiguredEventProcessor<MyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) { ... }
+```
+
+3. If existing, remove the ``fromExtractor`` method and use the provided ``ProcessingElementParameterExtractor``
+
+
+### Flink Wrapper
+
+
+## Migrating Data Sinks
+
+### JVM Wrapper
+
+#### Sink Class
+
+1. Make the class **implement** ``EventSink`` instead of **extending** ``EventSink``
+
+```java
+// old
+public class MySink extends EventSink<MySinkParameters> { ... }
+
+// new
+public class MySink implements EventSink<MySinkParameters>{ ... }
+```
+
+2. If present, remove the constructor that includes the parameter class.
+
+3. Change the ``bind`` method to ``onInvocation`` as follows:
+
+```java
+// old
+  @Override
+  public void bind(DemonstratorValveParameters parameters) throws SpRuntimeException { ... }
+
+// new
+  @Override
+  public void onInvocation(DemonstratorValveParameters parameters, EventSinkRuntimeContext runtimeContext) throws SpRuntimeException { ... }
+```
+
+4. Change the signature of the ``onEvent`` method
+
+```java
+// old
+  @Override
+  public void onEvent(Map<String, Object> event, String sourceInfo) {
+
+// new
+  @Override
+  public void onEvent(Event event) {
+```
+
+5. If necessary, adapt your logic to use the new event object.
+
+6. Rename the ``discard`` method to ``onDetach``.
+
+#### Controller
+
+1. In the ``onInvocation`` method, use a method reference instead of the lambda expression as return type:
+
+```java
+// old
+return new ConfiguredEventSink<>(params, () -> new MySink(params));
+
+// new
+return new ConfiguredEventSink<>(params, MySink::new);
+```
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-output-strategies.md b/documentation/docs/dev-guide-output-strategies.md
new file mode 100644
index 0000000..5489f77
--- /dev/null
+++ b/documentation/docs/dev-guide-output-strategies.md
@@ -0,0 +1,346 @@
+---
+id: dev-guide-output-strategies
+title: SDK Guide: Output Strategies
+sidebar_label: Output Strategies
+---
+
+## Introduction
+In StreamPipes, output strategies determine the output of a data processor.
+As the exact input schema of a processor is usually not yet known at development time (as processors can be connected with any stream that matches their requirements), output strategies are a concept to define how an input data stream is transformed to an output data stream.
+
+The following reference describes how output strategies can be defined using the SDK.
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://www.github.com/streampipes/streampipes-pipeline-elements-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/outputstrategy/">Github</a>.</p>
+</div>
+
+## Reference
+
+The methods described below to create output strategies are available in the ``ProcessingElementBuilder`` class and are usually used in the ``declareModel`` method of the controller class.
+
+In the following, we will use this example event to explain how output strategies define the output of a data processor:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1"
+
+}
+```
+
+### Keep Output
+
+A ``KeepOutputStrategy`` declares that the output event schema will be equal to the input event schema.
+In other terms, the processor does not change the schema, but might change the values of event properties.
+
+A keep output strategy can be defined as follows:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".keep", "Keep output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.anyProperty())
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // declaring a keep output strategy
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+
+```
+
+According to the example above, the expected output event schema of the example input event would be:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1"
+
+}
+```
+
+Data processors that perform filter operations (e.g., filtering temperature values that are above a given threshold) are a common example for using keep output strategies.
+
+
+### Fixed Output
+
+A ``FixedOutputStrategy`` declares that the data processor itself provides the event schema. The output schema does not depend on the input event.
+
+Fixed output strategies need to provide the event schema they produce at development time:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".fixed", "Fixed output example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.anyProperty())
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // the fixed output strategy provides the schema
+            .outputStrategy(OutputStrategies.fixed(EpProperties.timestampProperty("timestamp"),
+                    EpProperties.doubleEp(Labels.from("avg", "Average value", ""), "avg", SO.Number)))
+
+            .build();
+  }
+
+```
+
+In this example, we declare that the output schema always consists of two fields (``timestamp`` and ``avg``).
+
+Therefore, an output event should look like:
+
+```json
+{
+    "timestamp" : 1234556,
+    "avg" : 36.0
+}
+```
+
+
+### Append Output
+
+An ``AppendOutputStrategy`` appends additional fields to a schema of an incoming event stream. For instance, data processors that perform enrichment operations usually make use of append output strategies.
+
+Similar to the fixed output strategy, the additional fields must be provided at development time in the controller method as follows:
+
+```java
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".append", "Append output example", "")
+
+            // boilerplate code not relevant here, see above
+
+            // declaring an append output
+            .outputStrategy(OutputStrategies.append(EpProperties.integerEp(Labels.from("avg",
+                    "The average value", ""), "avg", SO.Number)))
+
+            .build();
+  }
+```
+
+In this case, the output event would have an additional field ``avg``:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : "1",
+    "avg" : 123.0
+
+}
+```
+
+### Custom Output
+
+In some cases, pipeline developers using the StreamPipes UI should be able to manually select fields from an input event schema. For such use cases, a ``CustomOutputStrategy`` can be used:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".custom", "Custom output example", "")
+
+            // boilerplate code not relevant here, see above
+
+            // declaring a custom output
+            .outputStrategy(OutputStrategies.custom())
+
+            .build();
+  }
+
+```
+
+If a data processor defines a custom output strategy, the customization dialog in the pipeline editor will show a dialog to let users select the fields to keep:
+
+<img src="/img/dev-guide-output-strategies/os-custom.png" width="80%" alt="Number Parameter">
+
+Taking our example, and assuming that the user selects both the ``timestamp`` and the ``temperature`` the expected output event should look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0
+}
+```
+
+How do we know which fields were selected once the data processor is invoked? Use the proper method from the extractor in the ``onInvocation`` method:
+
+```java
+@Override
+  public ConfiguredEventProcessor<DummyParameters> onInvocation(DataProcessorInvocation graph, ProcessingElementParameterExtractor extractor) {
+
+    List<String> outputSelectors = extractor.outputKeySelectors();
+
+    return new ConfiguredEventProcessor<>(new DummyParameters(graph), DummyEngine::new);
+  }
+```
+
+### Transform Output
+
+A ``TransformOutputStrategy`` declares that one or more fields of an incoming event stream are transformed. Transformations can be applied to the datatype of the property, the runtime name of the property, or any other schema-related declaration such as measurement units.
+
+#### Static Transform Operations
+
+Static transform operations do not depend on any user input (at pipeline development time) in order to know how to transform a field of an incoming event schema.
+
+Let's say our data processor transforms strings (that are actually a number) to a number datatype. In this case, we can use a static transform output strategy:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".transform", "Transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // static transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .staticDatatypeTransformation("str", Datatypes.Long)))
+
+            .build();
+  }
+
+```
+
+Note the mapping property that we use to determine which field of the input event should be transformed.
+
+The expected output event would look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temperature" : 37.0,
+    "deviceId" : 1
+}
+```
+
+#### Dynamic Transform Operations
+
+Sometimes, user input depends on the exact transform output. Let's take a field renaming processor as an example, which lets the user rename a field from an input event schema to another field name.
+For such use cases, we can use a ``DynamicTransformOperation``:
+
+```java
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".transform", "Transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // the text input to enter the new runtime name
+            .requiredTextparameter(Labels.from("new-runtime-name", "New Runtime Name", ""))
+
+            // static transform operation
+            .outputStrategy(OutputStrategies.transform(TransformOperations
+                    .dynamicRuntimeNameTransformation("str", "new-runtime-name")))
+
+            .build();
+  }
+
+```
+
+For dynamic transform operations, an additional identifier that links to another static property can be assigned and later be fetched in the ``onInvocation`` method.
+
+Assuming we want to rename the field ``temperature`` to ``temp``, the resulting output event should look like this:
+
+```json
+{
+    "timestamp" : 1234556,
+    "temp" : 37.0,
+    "deviceId" : 1
+}
+```
+
+### Custom Transform Output
+
+Finally, in some cases the output schema cannot be described at pipeline development time. For these (usually rare) cases, a ``CustomTransformOutput`` strategy can be used.
+
+In this case, a callback function will be invoked in the controller class just after a user has filled in any static properties and clicks on ``Save`` in the pipeline editor.
+
+To define a custom transform output, we need to implement an interface in the controller class:
+
+```java
+public class CustomTransformOutputController extends
+        StandaloneEventProcessingDeclarer<DummyParameters> implements
+        ResolvesContainerProvidedOutputStrategy<DataProcessorInvocation, ProcessingElementParameterExtractor> {
+
+
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+
+  }
+}
+```
+
+In addition, the output strategy must be declared in the ``declareModel`` method:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.outputstrategy" +
+            ".customtransform", "Custom transform output example example", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.stringReq(), Labels.from
+                            ("str", "The date property as a string", ""), PropertyScope.NONE)
+                    .build())
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+
+            // declare a custom transform output
+            .outputStrategy(OutputStrategies.customTransformation())
+
+            .build();
+  }
+
+```
+
+Once a new pipeline using this data processor is created and the configuration is saved, the ``resolveOutputStrategy`` method will be called, so that an event schema can be provided based on the given configuration. An extractor instance (see the guide on static properties) is available to extract the selected static properties and the connected event stream.
+
+```java
+@Override
+  public EventSchema resolveOutputStrategy(DataProcessorInvocation processingElement, ProcessingElementParameterExtractor parameterExtractor) throws SpRuntimeException {
+    return new EventSchema(Arrays
+            .asList(EpProperties
+                    .stringEp(Labels.from("runtime", "I was added at runtime", ""), "runtime", SO.Text)));
+  }
+```
+
+In this example, the output event schema should look like this:
+
+```json
+{
+    "runtime" : "Hello world!"
+}
+```
+
diff --git a/documentation/docs/dev-guide-processor-sdk.md b/documentation/docs/dev-guide-processor-sdk.md
new file mode 100644
index 0000000..fd36374
--- /dev/null
+++ b/documentation/docs/dev-guide-processor-sdk.md
@@ -0,0 +1,11 @@
+---
+id: dev-guide-sdk-guide-processors
+title: "SDK Guide: Data Processors"
+sidebar_label: "SDK Guide: Data Processors"
+---
+
+## Project Setup
+(coming soon, please check the [tutorial](dev-guide-tutorial-processors) to learn how to define data processors)
+
+## SDK reference
+The complete SDK reference for defining data processors will follow soon. Please check the SDK's Javadoc for now!
diff --git a/documentation/docs/dev-guide-sink-sdk.md b/documentation/docs/dev-guide-sink-sdk.md
new file mode 100644
index 0000000..a3a26c9
--- /dev/null
+++ b/documentation/docs/dev-guide-sink-sdk.md
@@ -0,0 +1,11 @@
+---
+id: dev-guide-sdk-guide-sinks
+title: "SDK Guide: Data Sinks"
+sidebar_label: "SDK Guide: Data Sinks"
+---
+
+## Project Setup
+(coming soon, please check the [tutorial](dev-guide-tutorial-sinks) to learn how to define sinks)
+
+## SDK reference
+The complete SDK reference for defining data sinks will follow soon. Please check the SDK's Javadoc for now!
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-source-sdk.md b/documentation/docs/dev-guide-source-sdk.md
new file mode 100644
index 0000000..e2f53f9
--- /dev/null
+++ b/documentation/docs/dev-guide-source-sdk.md
@@ -0,0 +1,75 @@
+---
+id: dev-guide-sdk-guide-sources
+title: "SDK Guide: Data Sources"
+sidebar_label: "SDK Guide: Data Sources"
+---
+
+## Project Setup
+
+Open the IDE of your choice and create a new maven project. Add the following dependencies to your pom file:
+
+<details class="info">
+<summary>pom.xml</summary>
+```xml
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-container-standalone</artifactId>
+    <version>0.50.0</version>
+</dependency>
+
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-sdk</artifactId>
+    <version>0.50.0</version>
+</dependency>
+
+<!-- This dependency needs to be imported if you plan to connect a new data stream with StreamPipes -->
+<dependency>
+    <groupId>org.streampipes</groupId>
+    <artifactId>streampipes-sources</artifactId>
+    <version>0.50.0</version>
+</dependency>
+```
+</details>
+
+Now you need to create three new classes:
+
+First, create a new class `MyStreamController` as follows:
+```java
+import org.streampipes.model.SpDataStream;
+import org.streampipes.model.graph.DataSourceDescription;
+import org.streampipes.sources.AbstractAlreadyExistingStream;
+
+public class MyStreamController extends AbstractAlreadyExistingStream {
+
+  @Override
+  public SpDataStream declareModel(DataSourceDescription sep) {
+
+  }
+}
+```
+Next, create a new class `MyStreamSource` as follows:
+
+```java
+import org.streampipes.container.declarer.EventStreamDeclarer;
+import org.streampipes.container.declarer.SemanticEventProducerDeclarer;
+import org.streampipes.model.graph.DataSourceDescription;
+
+import java.util.List;
+
+public class MyStreamSource implements SemanticEventProducerDeclarer {
+
+    @Override
+    public DataSourceDescription declareModel() {
+      return null;
+    }
+
+    @Override
+    public List<EventStreamDeclarer> getEventStreams() {
+        return null;
+    }
+}
+```
+
+## SDK Reference
+The complete SDK reference for defining data sources will follow soon. Please check the SDK's Javadoc for now!
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-ssl.md b/documentation/docs/dev-guide-ssl.md
new file mode 100644
index 0000000..a6f3453
--- /dev/null
+++ b/documentation/docs/dev-guide-ssl.md
@@ -0,0 +1,35 @@
+---
+id: dev-guide-ssl
+title: Use SSL
+sidebar_label: Use SSL
+---
+
+In this tutorial, we explain how you can use SSL certificates to provide transport layer security between your browser and the StreamPipes backend.
+
+## Prerequisites
+You need a valid Certificate consisting of a Private and a Public Key. Both Keys must be in PEM Format. Please note that your Private Key should never be shared, otherwise the communication can not be considered secure.
+
+## Edit docker-compose.yml
+In order to use SSL, you have to open port 443 on the nginx service. Incoming insecure traffic on port 80 will automatically be rerouted to port 443.
+
+The Environment-Variable NGINX_SSL must be set to "true".
+
+Finally you have to inject the Certificates into the Docker-Container. In the example below, the Certificates are placed in the directory /etc/ssl/private/ on the host machine. Please change the path according to the place where the Certificates are located on your machine. The path after the colon should not be changed!
+```yaml
+[...]
+  nginx:
+    image: streampipes/ui
+    ports:
+      - "80:80"
+      - "443:443"
+    environment:
+      - NGINX_SSL=true
+    volumes:
+      - /etc/ssl/private/private.pem:/etc/nginx/ssl/ssl.pem
+      - /etc/ssl/private/public.pem:/etc/nginx/ssl/cert.pem
+    depends_on:
+      - backend
+    networks:
+      spnet:
+[...]
+```
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-static-properties.md b/documentation/docs/dev-guide-static-properties.md
new file mode 100644
index 0000000..ca9da38
--- /dev/null
+++ b/documentation/docs/dev-guide-static-properties.md
@@ -0,0 +1,264 @@
+---
+id: dev-guide-static-properties
+title: "SDK Guide: Static Properties"
+sidebar_label: Static Properties
+---
+
+## Introduction
+Static properties represent user-facing parameters that are provided by pipeline developers.
+Processing elements can specify required static properties, which will render different UI views in the pipeline editor.
+
+The following reference describes how static properties can be defined using the SDK.
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://github.com/streampipes/streampipes-pipeline-elements-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/staticproperty">Github</a>.</p>
+</div>
+
+## Reference
+
+The methods described below to create static properties are available in the ``ProcessingElementBuilder`` and ``DataSinkBuilder`` classes and are usually used in the ``declareModel`` method of the controller class.
+
+### Mapping property
+
+In StreamPipes, processing elements usually operate on fields of an event stream. For instance, a filter processor operates on a specific field from an input stream (e.g., a field measuring the temperature).
+Typically, pipeline developers should themselves select the exact field that the operation is applied to.
+As this field is not yet known at pipeline element development time (as it is defined by the pipeline developer in the pipeline editor), mapping properties serve to map a stream requirement to a specific field from the actual input event stream.
+
+### Unary mapping property
+
+A unary mapping property maps a stream requirement to an actual field of an event stream. Therefore, the ``StreamRequirementsBuilder`` provides the opportunity to directly add a mapping property along with a property requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+    create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+            Labels.from("mp-key", "My Mapping", ""),
+            PropertyScope.NONE)
+    .build())
+```
+
+This leads to a selection dialog in the pipeline element customization which provides the user with a selection of all event properties (fields) from the input stream that match the specified property requirement:
+
+<img src="/img/dev-guide-static-properties/sp-mapping-unary.png" width="80%" alt="Text">
+
+At invocation time, the value can be extracted in the ``onInvocation`` method as follows:
+
+```java
+// Extract the mapping property value
+String mappingPropertySelector = extractor.mappingPropertyValue("mp-key");
+```
+
+Note that this method returns a ``PropertySelector``, which can be used by the event model to extract the actual value of this field.
+
+### N-ary mapping property
+
+N-ary mapping properties work similar to unary mapping properties, but allow the mapping of one requirement to multiple event properties matching the requirement:
+
+```java
+.requiredStream(StreamRequirementsBuilder.
+    create()
+    .requiredPropertyWithNaryMapping(EpRequirements.numberReq(),
+            Labels.from("mp-key", "My Mapping", ""),
+            PropertyScope.NONE)
+    .build())
+```
+
+This renders the following selection, where users can select more than one matching event property:
+
+<img src="/img/dev-guide-static-properties/sp-mapping-nary.png" width="80%" alt="Text">
+
+The following snippet returns a list containing the property selectors of all event properties that have been selected:
+
+```java
+// Extract the mapping property value
+List<String> mappingPropertySelectors = extractor.mappingPropertyValues("mp-key");
+```
+
+### Free-Text Parameters
+
+A free-text parameter requires the pipeline developer to enter a single value - which can be a string or another primitive data type.
+The input of free-text parameters can be restricted to specific value ranges or can be linked to the value set of a connected input data stream.
+
+#### Text Parameters
+
+A text parameter lets the user enter a string value. The following code line in the controller class
+
+```java
+.requiredTextParameter(Labels.from(SP_KEY, "Example Name", "Example Description"))
+```
+
+leads to the following input dialog in the pipeline editor:
+
+<img src="/img/dev-guide-static-properties/sp-text-parameter.png" width="80%" alt="Text">
+
+Users can enter any value that will be converted to a string datatype. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``
+
+```java
+String textParameter = extractor.singleValueParameter(SP_KEY, String.class);
+```
+
+#### Number parameters
+
+A number parameter lets the user enter a number value, either a floating-point number or an integer:
+
+```java
+// create an integer parameter
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"))
+
+// create a float parameter
+.requiredFloatParameter(Labels.from("float-key", "Float Parameter", "Example Description"))
+
+```
+
+leads to the following input dialog in the pipeline editor only accepting integer values:
+
+<img src="/img/dev-guide-static-properties/sp-number-parameter.png" width="80%" alt="Number Parameter">
+
+The pipeline editor performs type validation and ensures that only numbers can be added by the user. To receive the entered value in the ``onInvocation`` method, use the following method from the ``ParameterExtractor``
+
+```java
+// Extract the integer parameter value
+Integer integerParameter = extractor.singleValueParameter(SP_KEY, Integer.class);
+
+// Extract the float parameter value
+Float floatParameter = extractor.singleValueParameter("float-key", Float.class);
+
+```
+
+#### Numbers with value specification
+
+You can also specify the value range of a number-based free text parameter:
+
+```java
+// create an integer parameter with value range
+.requiredIntegerParameter(Labels.from(SP_KEY, "Integer Parameter", "Example Description"), 0, 100, 1)
+
+```
+
+which renders the following input field:
+
+<img src="/img/dev-guide-static-properties/sp-number-parameter-with-range.png" width="80%" alt="Number Parameter">
+
+Receive the entered value in the same way as a standard number parameter.
+
+#### Free-text parameters linked to an event property
+
+
+### Single-Value Selections
+
+Single-value selections let the user select from a pre-defined list of options.
+A single-value selection requires to select exactly one option.
+
+```java
+.requiredSingleValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Single-value selections will be rendered as a set of radio buttons in the pipeline editor:
+
+<img src="/img/dev-guide-static-properties/sp-single-selection.png" width="80%" alt="Number Parameter">
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+String selectedSingleValue = extractor.selectedSingleValue("id", String.class);
+```
+
+<div class="admonition tip">
+<div class="admonition-title">Declaring options</div>
+<p>Sometimes, you may want to use an internal name that differs from the display name of an option.
+For that, you can use the method Options.from(Tuple2<String, String>) and the extractor method selectedSingleValueInternalName.</p>
+</div>
+
+
+
+### Multi-Value Selections
+
+Multi-value selections let the user select from a pre-defined list of options, where multiple or no option might be selected.
+
+```java
+.requiredMultiValueSelection(Labels.from("id", "Example Name", "Example Description"),
+    Options.from("Option A", "Option B", "Option C"))
+
+```
+
+Multi-value selections will be rendered as a set of checkboxes in the pipeline editor:
+
+<img src="/img/dev-guide-static-properties/sp-multi-selection.png" width="80%" alt="Number Parameter">
+
+To extract the selected value, use the following method from the parameter extractor:
+
+```java
+// Extract the selected value
+List<String> selectedMultiValue = extractor.selectedMultiValues("id", String.class);
+```
+
+### Domain Concepts
+
+(coming soon...)
+
+### Collections
+
+You can also define collections based on other static properties.
+
+```java
+// create a collection parameter
+.requiredParameterAsCollection(Labels.from("collection", "Example Name", "Example " +
+        "Description"), StaticProperties.stringFreeTextProperty(Labels
+        .from("text-property","Text","")))
+```
+
+While the items of the collection can be provided in the same way as the underlying static property, the UI provides buttons to add and remove items to the collections.
+
+<img src="/img/dev-guide-static-properties/sp-collection.png" width="80%" alt="Number Parameter">
+
+To extract the selected values from the collection, use the following method from the parameter extractor:
+
+```java
+// Extract the text parameter value
+List<String> textParameters = extractor.singleValueParameterFromCollection("collection", String.class);
+```
+
+### Runtime-resolvable selections
+
+In some cases, the options of selection parameters are not static, but depend on other values or might change at runtime. In this case, you can use runtime-resolvable selections.
+
+First, let your controller class implement ``ResolvesContainerProvidedOptions``:
+
+```java
+public class RuntimeResolvableSingleValue extends
+     StandaloneEventProcessingDeclarer<DummyParameters> implements ResolvesContainerProvidedOptions { ... }
+```
+
+Next, define the parameter in the ``declareModel`` method:
+
+```java
+// create a single value selection parameter that is resolved at runtime
+    .requiredSingleValueSelectionFromContainer(Labels.from("id", "Example Name", "Example " +
+            "Description"))
+```
+
+Finally, implement the method ``resolveOptions``, which will be called at runtime once the processor is used:
+
+```java
+  @Override
+  public List<RuntimeOptions> resolveOptions(String requestId, EventProperty linkedEventProperty) {
+    return Arrays.asList(new RuntimeOptions("I was defined at runtime", ""));
+  }
+```
+
+The UI will render a single-value parameter based on the options provided at runtime:
+
+<img src="/img/dev-guide-static-properties/sp-single-selection-remote.png" width="80%" alt="Number Parameter">
+
+The parameter extraction does not differ from the extraction of static single-value parameters.
+
+<div class="admonition info">
+<div class="admonition-title">Multi-value selections</div>
+<p>Although this example shows the usage of runtime-resolvable selections using single value selections, the same also works for multi-value selections!</p>
+</div>
+
+
diff --git a/documentation/docs/dev-guide-stream-requirements.md b/documentation/docs/dev-guide-stream-requirements.md
new file mode 100644
index 0000000..c639a4e
--- /dev/null
+++ b/documentation/docs/dev-guide-stream-requirements.md
@@ -0,0 +1,178 @@
+---
+id: dev-guide-stream-requirements
+title: "SDK Guide: Stream Requirements"
+sidebar_label: Stream Requirements
+---
+
+## Introduction
+
+Data processors and data sinks can define ``StreamRequirements``. Stream requirements allow pipeline elements to express requirements on an incoming event stream that are needed for the element to work properly.
+Once users create pipelines in the StreamPipes Pipeline Editor, these requirements are verified against the connected event stream.
+By using this feature, StreamPipes ensures that only pipeline elements can be connected that are syntactically and semantically valid.
+
+This guide covers the creation of stream requirements. Before reading this section, we recommend that you make yourself familiar with the SDK guide on [data processors](dev-guide-processor-sdk.md) and [data sinks](dev-guide-sink-sdk.md).
+
+<div class="admonition tip">
+<div class="admonition-title">Code on Github</div>
+<p>For all examples, the code can be found on <a href="https://www.github.com/streampipes/streampipes-pipeline-elements-examples/tree/dev/streampipes-pipeline-elements-examples-processors-jvm/src/main/java/org/streampipes/pe/examples/jvm/requirements/">Github</a>.</p>
+</div>
+
+## The StreamRequirementsBuilder
+
+Stream requirements can be defined in the ``Controller`` class of the pipeline element. Start with a method body like this:
+
+```java
+
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+The ``StreamRequirementsBuilder`` class provides methods to add stream requirements to a pipeline element.
+
+## Requirements on primitive fields
+
+As a very first example, let's assume we would like to create a data processor that filters numerical values that are above a given threshold.
+Consequently, any data stream that is connected to the filter processor needs to provide a numerical value.
+
+The stream requirement would be assigned as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredProperty(EpRequirements.numberReq())
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+Note the line starting with ``requiredProperty``, which requires any stream to provide a datatype of type ``number``.
+
+In many cases, you'll want to let the user select a specific field from a data stream from all available fields that match the specified requirement. For that, you simply use the method ``requiredPropertyWithUnaryMapping`` as follows:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create(ID, PIPELINE_ELEMENT_NAME, DESCRIPTION)
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.numberReq(),
+                    Labels.from("number-mapping", "The value that should be filtered", ""), PropertyScope.NONE)
+                    .build())
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+See also the developer guide on [static properties](dev-guide-static-properties.md) to better understand the usage of ``MappingProperties``.
+
+Requirements on primitive fields can be specified for all common datatypes:
+
+```java
+ @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".simple", "Simple requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.numberReq()) // any number
+                    .requiredProperty(EpRequirements.doubleReq()) // any field of type double
+                    .requiredProperty(EpRequirements.booleanReq()) // any field of type boolean
+                    .requiredProperty(EpRequirements.integerReq()) // any field of type integer
+                    .requiredProperty(EpRequirements.stringReq()) // any field of type string
+
+                    .requiredProperty(EpRequirements.anyProperty()) // any field allowed (no restriction)
+                    .requiredProperty(EpRequirements.timestampReq())  // any timestamp field
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+```
+
+### Specifying semantics
+
+For some algorithms, only specifying the datatype is not sufficient. Let's consider a geofencing algorithm that detects the presence of some geospatial coordinate (e.g., from a vehicle) within a given location.
+
+You could specify something like this:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleEp(), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.doubleEp(), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+However, this would allow users to create strange pipelines as users could connect any stream containing a double value to our geofencing algorithm.
+To avoid such situations, you can also specify requirements based on the semantics of a field:
+
+```java
+    StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Latitude), Labels.from("mapping-latitude", "Latitude", ""), PropertyScope.NONE)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(SO.Longitude), Labels.from("mapping-longitude", "Longitude", ""), PropertyScope.NONE)
+    .build()
+```
+
+Note that in this case, we make use of Schema.org's ``Latitude`` concept ([https://schema.org/latitude](https://schema.org/latitude)). StreamPipes already includes popular vocabularies for specifying semantics. You are also free to use your own vocabularies.
+
+
+## Requirements on lists
+
+Similarly to primitive requirements, you can define processors that require data streams with list fields, see the following examples:
+
+```java
+@Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("org.streampipes.examples.requirements" +
+            ".list", "List requirements specification examples", "")
+            .requiredStream(StreamRequirementsBuilder.
+                    create()
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Integer))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Double))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.Boolean))
+                    .requiredProperty(EpRequirements.listRequirement(Datatypes.String))
+                    .build())
+
+
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+
+            .build();
+  }
+```
+
+## Requirements on nested properties
+
+(coming soon, see the Javadoc for now)
+
+
+
diff --git a/documentation/docs/dev-guide-tutorial-processors.md b/documentation/docs/dev-guide-tutorial-processors.md
new file mode 100644
index 0000000..9fc83d4
--- /dev/null
+++ b/documentation/docs/dev-guide-tutorial-processors.md
@@ -0,0 +1,499 @@
+---
+id: dev-guide-tutorial-processors
+title: "Tutorial: Data Processors"
+sidebar_label: "Tutorial: Data Processors"
+---
+
+In this tutorial, we will add a new data processor using the Apache Flink wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data processor and a Flink-compatible implementation.
+Once a pipeline is started that uses this data processor, the implementation is submitted to an Apache Flink cluster.
+
+## Objective
+
+We are going to create a new data processor that realizes a simple geofencing algorithm - we detect vehicles that enter a specified radius around a user-defined location.
+This pipeline element will be a generic element that works with any event stream that provides geospatial coordinates in form of a latitude/longitude pair.
+
+The algorithm outputs every location event once the position has entered the geofence.
+
+
+<div class="admonition note">
+<div class="admonition-title">Note</div>
+<p>The implementation in this tutorial is pretty simple - our processor will fire an event every time the GPS location is inside the geofence.
+       In a real-world application, you would probably want to define a pattern that recognizes the _first_ event a vehicle enters the geofence.<br/>
+       This can be easily done using a CEP library, e.g., Apache Flink CEP.</p>
+</div>
+
+
+## Project setup
+
+To create new projects from scratch, several Maven archetypes exist to start developing.
+Enter the following command to create a new project based on the StreamPipes ``Processors-Flink`` archetype:
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-pe-processors-flink -DarchetypeVersion=0.64.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=geofence-tutorial -DclassNamePrefix=Geofencing -DpackageName=geofencing
+```
+
+Once you've imported the generated project, the project structure should look as follows:
+
+<img src="/img/tutorial-processors/project-structure-processor.PNG" alt="Project Structure Data Processor">
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data processor for StreamPipes!
+
+## Adding data processor requirements
+
+First, we will add a new stream requirement.
+Open the class `GeofencingController` which should look as follows:
+
+```java
+package org.streampipes.tutorial.pe.processor.geofencing;
+
+import org.streampipes.tutorial.config.Config;
+
+import org.streampipes.model.DataProcessorType;
+import org.streampipes.model.graph.DataProcessorDescription;
+import org.streampipes.model.graph.DataProcessorInvocation;
+import org.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.ProcessingElementParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.OutputStrategies;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.wrapper.flink.FlinkDataProcessorDeclarer;
+import org.streampipes.wrapper.flink.FlinkDataProcessorRuntime;
+
+public class GeofencingController extends
+				FlinkDataProcessorDeclarer<GeofencingParameters> {
+
+	private static final String EXAMPLE_KEY = "example-key";
+
+	@Override
+	public DataProcessorDescription declareModel() {
+		return ProcessingElementBuilder.create("org.streampipes.tutorial-geofencing")
+						.category(DataProcessorType.ENRICH)
+                        .withAssets(Assets.DOCUMENTATION, Assets.ICON)
+						.withLocales(Locales.EN)
+						.requiredStream(StreamRequirementsBuilder
+							.create()
+							.requiredProperty(EpRequirements.anyProperty())
+							.build())
+						.outputStrategy(OutputStrategies.keep())
+						.requiredTextParameter(Labels.from(EXAMPLE_KEY, "Example Text Parameter", "Example " +
+				"Text Parameter Description"))
+						.build();
+	}
+
+	@Override
+	public FlinkDataProcessorRuntime<GeofencingParameters> getRuntime(DataProcessorInvocation
+				graph, ProcessingElementParameterExtractor extractor) {
+
+		String exampleString = extractor.singleValueParameter(EXAMPLE_KEY, String.class);
+
+		GeofencingParameters params = new GeofencingParameters(graph, exampleString);
+
+		return new GeofencingProgram(params, Config.INSTANCE.getDebug());
+	}
+
+}
+
+```
+
+In this class, we need to implement two methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `getRuntime` is used to create and deploy the parameterized Flink program once a pipeline using this element is started.
+
+Similar to data sources, the SDK provides a builder class to generate the description for data processors.
+Delete the content within the ``declareModel`` method and add the following lines to the `declareModel` method:
+
+```java
+return ProcessingElementBuilder.create("org.streampipes.tutorial.geofencing", "Geofencing", "A simple geofencing data processor " +
+            "using the Apache Flink wrapper")
+```
+
+This creates a new data processor with the ID, title and description assigned to the element builder.
+Next, we add some _stream requirements_ to the description. As we'd like to develop a generic pipeline element that works with any event that provides a lat/lng pair, we define two stream requirements as stated below:
+
+```java
+.requiredStream(StreamRequirementsBuilder
+    .create()
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+            Labels.from("latitude-field", "Latitude", "The event " +
+            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+            Labels.from("longitude-field", "Longitude", "The event " +
+                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+    .build())
+```
+
+The first line, `.requiredStream()` defines that we want a data processor with exactly one input stream. Adding more stream requirements would create elements with multiple input connectors in StreamPipes.
+Stream requirements can be assigned by using the `StreamRequirementsBuilder` class.
+In our example, we define two requirements, so-called _domain property requirements_. In contrast to _data type requirements_ where we'd expect an event property with a field of a specific data type (e.g., float), domain property requirements expect a specific domain property, e.g., from a vocabulary such as the WGS84 Geo vocab.
+
+Once a pipeline is deployed, we are interested in the actual field (and its field name) that contains the latitude and longitude values.
+In some cases, there might be more than one field that satisfies a property requirement and we would like users to select the property the geofencing component should operate on.
+Therefore, our example uses the method `requiredPropertyWithUnaryMapping`, which will map a requirement to a real event property of an input stream and  let the user choose the appropriate field in the StreamPipes UI when pipelines are defined.
+
+Finally, the `PropertyScope` indicates that the required property is a measurement value (in contrast to a dimension value). This allows us later to provide improved user guidance in the pipeline editor.
+
+Besides requirements, users should be able to define the center coordinate of the Geofence and the size of the fence defined as a radius around the center in meters.
+The radius can be defined by adding a simple required integer parameter to the description:
+
+```java
+.requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.", 0, 1000, 1)
+```
+
+Similar to mapping properties, text parameters have an internalId (radius), a label and a description.
+In addition, we can assign a _value specification_ to the parameter indicating the value range we support.
+Our example supports a radius value between 0 and 1000 with a granularity of 1.
+In the StreamPipes UI, a required text parameter is rendered as a text input field, in case we provide an optional value specification, a slider input is automatically generated.
+
+Such user-defined parameters are called _static properties_. There are many different types of static properties (see the [Processor SDK](dev-guide-sdk-guide-processors#docsNav) for an overview).
+
+One example are _DomainProperties_ we use for defining the center of the geofence.
+Our data processor requires a lat/lng pair that marks the center of the geofence.
+However, letting users directly input latitude and longitude coordinates wouldn't be very user-friendly.
+Therefore, we can link required text parameters to _ontology concepts_. By understanding the required input, the StreamPipes UI is able to determine which user interface works best for a specific concept.
+
+Add the following line to the `declareModel` method:
+
+```java
+.requiredOntologyConcept(Labels.from("location", "Geofence Center", "Provide the coordinate of the " +
+    "geofence center"), OntologyProperties.mandatory(Geo.lat), OntologyProperties.mandatory(Geo.lng))
+
+```
+
+We've now defined that we would like to receive an instance that provides a latitude and a longitude coordinate.
+Users can input these values either manually, or they can look up _domain knowledge_, i.e., knowledge stored isolated from the stream definition.
+
+Finally, we need to define technical requirements of the data processor, called _groundings_.
+StreamPipes supports various runtime data formats (e.g., JSON or Thrift) and various protocols (e.g., Kafka or JMS).
+Each component defines supported formats and protocols separately.
+For our example, we'd like to support JSON-based messages and Kafka as input and output broker protocol, so append the following:
+
+```java
+.supportedProtocols(SupportedProtocols.kafka())
+.supportedFormats(SupportedFormats.jsonFormat())
+.build();
+```
+
+Now we need to define the output of our Geofencing pipeline element.
+As explained in the first section, the element should fire every time some geo-located entity arrives within the defined geofence.
+Therefore, the processor outputs the same schema as it receives as an input.
+Although we don't know the exact input right now as it depends on the stream users connect in StreamPipes when creating pipelines, we can define an _output strategy_ as follows:
+
+```java
+.outputStrategy(OutputStrategies.keep())
+```
+
+This defines a _KeepOutputStrategy_, i.e., the input event schema is not modified by the processor.
+There are many more output strategies you can define depending on the functionality you desire, e.g., _AppendOutput_ for defining a processor that enriches events or _CustomOutput_ in case you would like users to select the output by themselves.
+
+That's it! We've now defined input requirements, required user input, technical requirements concerned with data format and protocol and an output strategy.
+In the next section, you will learn how to extract these parameters once the pipeline element is invoked after a pipeline was created.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our geofencing component, the _getRuntime_ method in our class is called. The class `DataProcessorInvocation` includes a graph that contains information on the configuration parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
+
+Before we explain in more detail how to extract these values from the processor invocation, we need to adapt a little helper class.
+Open the file ```GeofencingParameters``` and modify it as follows:
+
+```java
+public class GeofencingParameters extends EventProcessorBindingParams {
+
+  private String latitudeFieldName;
+  private String longitudeFieldName;
+
+  private Float centerLatitude;
+  private Float centerLongitude;
+
+  private Integer radius;
+
+  public GeofencingParameters(DataProcessorInvocation graph, String latitudeFieldName, String longitudeFieldName,
+                              Float centerLatitude, Float centerLongitude, Integer radius) {
+    super(graph);
+    this.latitudeFieldName = latitudeFieldName;
+    this.longitudeFieldName = longitudeFieldName;
+    this.centerLatitude = centerLatitude;
+    this.centerLongitude = centerLongitude;
+    this.radius = radius;
+  }
+
+  public String getLatitudeFieldName() {
+    return latitudeFieldName;
+  }
+
+  public String getLongitudeFieldName() {
+    return longitudeFieldName;
+  }
+
+  public Float getCenterLatitude() {
+    return centerLatitude;
+  }
+
+  public Float getCenterLongitude() {
+    return centerLongitude;
+  }
+
+  public Integer getRadius() {
+    return radius;
+  }
+}
+```
+
+This simple Pojo class will later serve to store user-defined parameters in a single object.
+
+Now we go back to the controller class and extract these values from the invocation object.
+
+The ``ProcessingElementParameterExtractor`` provides convenience methods to extract the relevant information from the `DataProcessorInvocation` object.
+
+Next, we are interested in the fields of the input event stream that contains the latitude and longitude value we would like to compute against the geofence center location as follows:
+
+```java
+String latitudeFieldName = extractor.mappingPropertyValue("latitude-field");
+String longitudeFieldName = extractor.mappingPropertyValue("longitude-field");
+```
+
+We use the same `internalId` we've used to define the mapping property requirements in the `declareModel` method.
+
+Next, for extracting the geofence center coordinates, we use the following statements:
+
+```java
+Float centerLatitude = extractor.supportedOntologyPropertyValue("location", Geo.lat, Float.class);
+Float centerLongitude = extractor.supportedOntologyPropertyValue("location", Geo.lng, Float.class);
+```
+
+The radius value can be extracted as follows:
+
+```java
+Integer radius = extractor.singleValueParameter("radius", Integer.class);
+```
+
+Now we can create a new instance of our previously created parameter class:
+
+```java
+GeofencingParameters params = new GeofencingParameters(dataProcessorInvocation, latitudeFieldName,
+            longitudeFieldName, centerLatitude, centerLongitude, radius);
+```
+
+Finally, return an instance of the class ```GeofencingProgram```:
+
+```java
+return new GeofencingProgram(params, true);
+```
+
+<div class="admonition tip">
+<div class="admonition-title">Info</div>
+<p>The line above uses the Flink MiniCluster to start the Flink program for debugging purposes.
+       Before you build the project and use it in a real environment, replace the line as follows, which triggers cluster execution:
+       <code>return new GeofencingProgram(params, new FlinkDeploymentConfig(Config.JAR_FILE, Config.INSTANCE.getFlinkHost(), Config.INSTANCE.getFlinkPort()));</code></p>
+</div>
+
+
+Great! That's all we need to describe a data processor for usage in StreamPipes. Your controller class should look as follows:
+
+```java
+import org.streampipes.model.graph.DataProcessorDescription;
+import org.streampipes.model.graph.DataProcessorInvocation;
+import org.streampipes.model.schema.PropertyScope;
+import org.streampipes.sdk.builder.ProcessingElementBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.ProcessingElementParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.OntologyProperties;
+import org.streampipes.sdk.helpers.OutputStrategies;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.vocabulary.Geo;
+import org.streampipes.wrapper.flink.FlinkDataProcessorDeclarer;
+import org.streampipes.wrapper.flink.FlinkDataProcessorRuntime;
+
+public class GeofencingController extends FlinkDataProcessorDeclarer<GeofencingParameters> {
+
+  @Override
+  protected FlinkDataProcessorRuntime<GeofencingParameters> getRuntime(DataProcessorInvocation dataProcessorInvocation) {
+    ProcessingElementParameterExtractor extractor = ProcessingElementParameterExtractor.from(dataProcessorInvocation);
+
+    String latitudeFieldName = extractor.mappingPropertyValue("latitude-field");
+    String longitudeFieldName = extractor.mappingPropertyValue("longitude-field");
+
+    Float centerLatitude = extractor.supportedOntologyPropertyValue("location", Geo.lat, Float.class);
+    Float centerLongitude = extractor.supportedOntologyPropertyValue("location", Geo.lng, Float.class);
+
+    Integer radius = extractor.singleValueParameter("radius", Integer.class);
+
+    GeofencingParameters params = new GeofencingParameters(dataProcessorInvocation, latitudeFieldName,
+            longitudeFieldName, centerLatitude, centerLongitude, radius);
+
+    return new GeofencingProgram(params);
+  }
+
+  @Override
+  public DataProcessorDescription declareModel() {
+    return ProcessingElementBuilder.create("geofencing-flink", "Geofencing", "A simple geofencing data processor " +
+            "using the Apache Flink wrapper")
+            .requiredStream(StreamRequirementsBuilder
+                    .create()
+                    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lat),
+                            Labels.from("latitude-field", "Latitude", "The event " +
+                            "property containing the latitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                    .requiredPropertyWithUnaryMapping(EpRequirements.domainPropertyReq(Geo.lng),
+                            Labels.from("longitude-field", "Longitude", "The event " +
+                                    "property containing the longitude value"), PropertyScope.MEASUREMENT_PROPERTY)
+                    .build())
+            .requiredIntegerParameter("radius", "Geofence Size", "The size of the circular geofence in meters.",
+                    0, 1000, 1)
+            .requiredOntologyConcept(Labels.from("location", "Geofence Center", "Provide the coordinate of the " +
+                    "geofence center"), OntologyProperties.mandatory(Geo.lat), OntologyProperties.mandatory(Geo.lng))
+            .supportedProtocols(SupportedProtocols.kafka())
+            .supportedFormats(SupportedFormats.jsonFormat())
+            .outputStrategy(OutputStrategies.keep())
+            .build();
+  }
+}
+
+```
+
+## Adding an implementation
+
+Everything we need to do now is to add an implementation which does not differ from writing an Apache Flink topology.
+
+Open the class `GeofencingProcessor.java` and add the following piece of code, which realizes the Geofencing functionality:
+
+```java
+public class GeofencingProcessor implements FlatMapFunction<Event, Event> {
+
+  private String latitudeFieldName;
+  private String longitudeFieldName;
+
+  private Float centerLatitude;
+  private Float centerLongitude;
+
+  private Integer radius;
+
+  public GeofencingProcessor(String latitudeFieldName, String longitudeFieldName, Float centerLatitude, Float centerLongitude, Integer radius) {
+    this.latitudeFieldName = latitudeFieldName;
+    this.longitudeFieldName = longitudeFieldName;
+    this.centerLatitude = centerLatitude;
+    this.centerLongitude = centerLongitude;
+    this.radius = radius;
+  }
+
+  @Override
+  public void flatMap(Event in, Collector<Event> out) throws Exception {
+    Float latitude = in.getFieldBySelector(latitudeFieldName).getAsPrimitive().getAsFloat();
+    Float longitude = in.getFieldBySelector(longitudeFieldName).getAsPrimitive().getAsFloat();
+
+    Float distance = distFrom(latitude, longitude, centerLatitude, centerLongitude);
+
+    if (distance <= radius) {
+      out.collect(in);
+    }
+  }
+
+  public static Float distFrom(float lat1, float lng1, float lat2, float lng2) {
+    double earthRadius = 6371000;
+    double dLat = Math.toRadians(lat2-lat1);
+    double dLng = Math.toRadians(lng2-lng1);
+    double a = Math.sin(dLat/2) * Math.sin(dLat/2) +
+            Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2)) *
+                    Math.sin(dLng/2) * Math.sin(dLng/2);
+    double c = 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1-a));
+    return (float) (earthRadius * c);
+  }
+
+}
+```
+
+We won't go into details here as this isn't StreamPipes-related code, but in general the class extracts latitude and longitude fields from the input event and calculates the distance between the geofence center and these coordinates.
+If the distance is below the given radius, the event is forwarded to the next operator.
+
+Finally, we need to connect this program to the Flink topology. StreamPipes automatically adds things like the Kafka consumer and producer, so that you only need to invoke the actual geofencing processor.
+Open the file `GeofencingProgram` and append the following line inside the `getApplicationLogic()` method:
+
+```java
+return dataStreams[0].flatMap(new GeofencingProcessor(params.getLatitudeFieldName(), params.getLongitudeFieldName(),
+    params.getCenterLatitude(), params.getCenterLongitude(), params.getRadius()));
+```
+
+## Preparing the container
+The final step is to define the deployment type of our new data processor. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data processor and automatically submits the program to the registered Apache Flink cluster.
+
+Go to the class `Init` that extends `StandaloneModelSubmitter` and should look as follows:
+```java
+package org.streampipes.tutorial.main;
+
+import org.streampipes.container.init.DeclarersSingleton;
+import org.streampipes.container.standalone.init.StandaloneModelSubmitter;
+
+import org.streampipes.tutorial.config.Config;
+import org.streampipes.tutorial.pe.processor.geofencing.GeofencingController;
+
+public class Init extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new GeofencingController());
+
+    new Init().init(Config.INSTANCE);
+
+  }
+}
+```
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.
+</p>
+</div>
+
+## Starting the container
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element containers as defined in the `Config` file is port 8090.
+       If you'd like to run multiple containers at the same time on your development machine, change the port in the environment file.
+</p>
+</div>
+
+Now we are ready to start our container!
+
+Execute the main method in the class `Init` we've just created, open a web browser and navigate to http://localhost:8090 (or the port you have assigned in the environment file).
+
+You should see something as follows:
+
+<img src="/img/tutorial-processors/pe-overview-flink.PNG" alt="Pipeline Element Container Overview (Flink)">
+
+Click on the link of the data processor to see the RDF description of the pipeline element.
+
+<img src="/img/tutorial-processors/pe-rdf-geofencing.PNG" alt="Geofencing RDF description">
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the just created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-installation).
+
+## Read more
+
+Congratulations! You've just created your first data processor for StreamPipes.
+There are many more things to explore and data processors can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](dev-guide-sdk-guide-processors) to see what's possible!
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-tutorial-sinks.md b/documentation/docs/dev-guide-tutorial-sinks.md
new file mode 100644
index 0000000..2e684fb
--- /dev/null
+++ b/documentation/docs/dev-guide-tutorial-sinks.md
@@ -0,0 +1,246 @@
+---
+id: dev-guide-tutorial-sinks
+title: "Tutorial: Data Sinks"
+sidebar_label: "Tutorial: Data Sinks"
+---
+
+In this tutorial, we will add a new data sink using the standalone wrapper.
+
+From an architectural point of view, we will create a self-contained service that includes the description of the data sink and a corresponding implementation.
+
+## Objective
+
+We are going to create a new data sink that calls an external HTTP endpoint to forward data to an external service.
+
+For each incoming event, an external service is invoked using an HTTP POST request. In this example, we'll call an endpoint provided by [RequestBin](https://requestbin.com/).
+To setup your own endpoint, go to [https://requestbin.com/](https://requestbin.com/) and click "Create a request bin". Copy the URL of the newly created endpoint.
+
+
+## Project setup
+
+We'll create a new project using the provided sinks-standalone-jvm Maven archetype.
+Enter the following command to create a new project based on the StreamPipes ``Sinks-JVM`` archetype:
+
+```
+mvn archetype:generate -DarchetypeGroupId=org.streampipes \
+-DarchetypeArtifactId=streampipes-archetype-pe-sinks-jvm -DarchetypeVersion=0.64.0 \
+-DgroupId=org.streampipes.tutorial -DartifactId=sink-tutorial -DclassNamePrefix=Rest -DpackageName=geofencing
+```
+
+Once you've imported the generated project, the project structure should look as follows:
+
+<img src="/img/tutorial-sinks/project-structure-sinks.png" alt="Project Structure Data Sink">
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.</p>
+</div>
+
+Now you're ready to create your first data sink for StreamPipes!
+
+## Adding data sink requirements
+
+First, we will add a new stream requirement.
+Open the class `RestController` which should look as follows:
+
+```java
+package org.streampipes.tutorial.pe.sink.rest;
+
+import org.streampipes.model.DataSinkType;
+import org.streampipes.model.graph.DataSinkDescription;
+import org.streampipes.model.graph.DataSinkInvocation;
+import org.streampipes.sdk.builder.DataSinkBuilder;
+import org.streampipes.sdk.builder.StreamRequirementsBuilder;
+import org.streampipes.sdk.extractor.DataSinkParameterExtractor;
+import org.streampipes.sdk.helpers.EpRequirements;
+import org.streampipes.sdk.helpers.Labels;
+import org.streampipes.sdk.helpers.SupportedFormats;
+import org.streampipes.sdk.helpers.SupportedProtocols;
+import org.streampipes.wrapper.standalone.ConfiguredEventSink;
+import org.streampipes.wrapper.standalone.declarer.StandaloneEventSinkDeclarer;
+import org.streampipes.sdk.helpers.*;
+import org.streampipes.sdk.utils.Assets;
+
+public class RestController extends StandaloneEventSinkDeclarer<RestParameters> {
+
+	private static final String EXAMPLE_KEY = "example-key";
+
+	@Override
+	public DataSinkDescription declareModel() {
+		return DataSinkBuilder.create("org.streampipes.tutorial.pe.sink.rest")
+						.category(DataSinkType.NOTIFICATION)
+						.withAssets(Assets.DOCUMENTATION, Assets.ICON)
+						.withLocales(Locales.EN)
+						.requiredStream(StreamRequirementsBuilder
+                                            .create()
+                                            .requiredPropertyWithNaryMapping(EpRequirements.anyProperty(), Labels.withId(
+                                                    "fields-to-send"), PropertyScope.NONE)
+                                            .build())
+						.build();
+	}
+
+	@Override
+	public ConfiguredEventSink<RestParameters> onInvocation(DataSinkInvocation graph, DataSinkParameterExtractor extractor) {
+
+		 List<String> fieldsToSend = extractor.mappingPropertyValues("fields-to-send");
+        
+         RestParameters params = new RestParameters(graph, fieldsToSend);
+
+		return new ConfiguredEventSink<>(params, Rest::new);
+	}
+
+}
+
+```
+
+In this class, we need to implement two methods: The `declareModel` method is used to define abstract stream requirements such as event properties that must be present in any input stream that is later connected to the element using the StreamPipes UI.
+The second method, `onInvocation` is used to create and deploy the program once a pipeline using this sink is started.
+
+The ``declareModel`` method describes the properties of our data sink:
+* ``category`` defines a category for this sink.
+* ``withAssets`` denotes that we will provide an external documentation file and an icon, which can be found in the ``resources`` folder
+* ``withLocales`` defines that we will provide an external language file, also available in the ``resources`` folder
+* ``requiredStream`` defines requirements any input stream connected to this sink must provide. In this case, we do not have any specific requirements, we just forward all incoming events to the REST sink. However, we want to let the user display a list of available fields from the connected input event, where users can select a subset. This is defined by defining a Mapping from the empty requirement. This will later on render a selection dialog in the pipeline editor.
+
+The ``onInvocation`` method is called when a pipeline containing the sink is started. Once a pipeline is started, we would like to extract user-defined parameters.
+In this example, we simply extract the fields selected by users that should be forwarded to the REST sink. Finally, we return a new configured event sink containing the parameters.
+
+## Pipeline element invocation
+
+Once users start a pipeline that uses our Rest data sink, the _onInvocation_ method in our class is called. The class `DataSinkInvocation` includes a graph that contains information on the configuration parameters a user has selected in the pipeline editor and information on the actual streams that are connected to the pipeline element.
+
+Before we explain in more detail how to extract these values from the processor invocation, we need to adapt a little helper class.
+Open the file ```RestParameters``` and modify it as follows:
+
+```java
+public class RestParameters extends EventSinkBindingParams {
+
+  private List<String> fieldsToSend;
+
+  public RestParameters(DataSinkInvocation graph, List<String> fieldsToSend) {
+    super(graph);
+    this.fieldsToSend = fieldsToSend;
+  }
+
+  public List<String> getFieldsToSend() {
+    return fieldsToSend;
+  }
+}
+```
+
+This file will later provide information on the configured pipeline element.
+
+## Adding an implementation
+
+Now open the class ``Rest`` to add the proper implementation (i.e., the Rest call executed for every incoming event).
+
+Our final class should look as follows:
+
+```java
+private static Logger LOG = LoggerFactory.getLogger(Rest.class.getCanonicalName());
+
+  private static final String REST_ENDPOINT_URI = YOUR_REQUEST_BIN_URL;
+  private List<String> fieldsToSend;
+  private SpDataFormatDefinition dataFormatDefinition;
+
+  public Rest() {
+    this.dataFormatDefinition = new JsonDataFormatDefinition();
+  }
+
+  @Override
+  public void onInvocation(RestParameters parameters, EventSinkRuntimeContext runtimeContext) {
+    this.fieldsToSend = parameters.getFieldsToSend();
+  }
+
+  @Override
+  public void onEvent(Event event) {
+    Map<String, Object> outEventMap = event.getSubset(fieldsToSend).getRaw();
+    try {
+      String json = new String(dataFormatDefinition.fromMap(outEventMap));
+      Request.Post(REST_ENDPOINT_URI).body(new StringEntity(json, Charsets.UTF_8)).execute();
+    } catch (SpRuntimeException e) {
+      LOG.error("Could not parse incoming event");
+    } catch (IOException e) {
+      LOG.error("Could not reach endpoint at {}", REST_ENDPOINT_URI);
+    }
+  }
+
+  @Override
+  public void onDetach() {
+
+  }
+```
+The only class variable you need to change right now is the REST_ENDPOINT_URI. Change this URL to the URL provided by your request bin.
+We'll ignore the other class variables and the constructor for now. Three methods are important: ``onInvocation``, ``onEvent`` and ``onDetach``.
+
+The ``onInvocation`` method is called once a pipeline containing our REST data sink is started. The ``onEvent`` method is called for each incoming event. Finally, ``onDetach`` is called when a pipeline is stopped.
+
+In the ``onInvocation`` method, we can extract the selected fields to be forwarded to the REST endpoint.
+In the ``onEvent`` method, we use a helper method to get a subset of the incoming event.
+Finally, we convert the resulting ``Map`` to a JSON string and call the endpoint.
+
+
+## Preparing the container
+The final step is to define the deployment type of our new data source. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data source and automatically starts the program in an embedded container.
+
+Go to the class `Init` that extends `StandaloneModelSubmitter` and should look as follows:
+```java
+
+public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new RestController());
+
+    DeclarersSingleton.getInstance().setPort(Config.INSTANCE.getPort());
+    DeclarersSingleton.getInstance().setHostName(Config.INSTANCE.getHost());
+
+    DeclarersSingleton.getInstance().registerDataFormats(new JsonDataFormatFactory(),
+            new CborDataFormatFactory(),
+            new SmileDataFormatFactory(),
+            new FstDataFormatFactory());
+
+    DeclarersSingleton.getInstance().registerProtocols(new SpKafkaProtocolFactory(),
+            new SpJmsProtocolFactory());
+
+    new Init().init(Config.INSTANCE);
+```
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.
+</p>
+</div>
+
+## Starting the container
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p> The default port of all pipeline element containers as defined in the `Config` file is port 8090.
       If you'd like to run multiple containers at the same time on your development machine, change the port in the environment file.
+</p>
+</div>
+
+Now we are ready to start our container!
+
+Execute the main method in the class `Main` we've just created.
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the just created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-installation).
+
+## Read more
+
+Congratulations! You've just created your first data sink for StreamPipes.
+There are many more things to explore and data sinks can be defined in much more detail using multiple wrappers.
+Follow our [SDK guide](dev-guide-sdk-guide-sinks) to see what's possible!
\ No newline at end of file
diff --git a/documentation/docs/dev-guide-tutorial-sources.md b/documentation/docs/dev-guide-tutorial-sources.md
new file mode 100644
index 0000000..2489e85
--- /dev/null
+++ b/documentation/docs/dev-guide-tutorial-sources.md
@@ -0,0 +1,282 @@
+---
+id: dev-guide-tutorial-sources
+title: "Tutorial: Data Sources"
+sidebar_label: "Tutorial: Data Sources"
+---
+
+In this tutorial, we will add a new data source consisting of a single data stream. The source will be provided as a standalone component (i.e., the description will be accessible through an integrated web server).
+
+## Objective
+
+We are going to create a new data stream that is produced by a GPS sensor installed in a delivery vehicle.
+The sensor produces a continuous stream of events that contain the current timestamp, the current lat/lng position of the vehicle and the plate number of the vehicle.
+Events are published in a JSON format as follows:
+```json
+{
+  "timestamp" : 145838399,
+  "latitude" : 37.04,
+  "longitude" : 17.04,
+  "plateNumber" : "KA-AB 123"
+}
+```
+
+These events are published to a Kafka broker using the topic `org.streampipes.tutorial.vehicle`.
+
+In the following section, we show how to describe this stream in a form that allows you to import and use it in StreamPipes.
+
+## Project setup
+
+Instead of creating a new project from scratch, we recommend to use the Maven archetype to create a new project skeleton.
+Enter the following command in a command line of your choice (Apache Maven needs to be installed):
+
+```
+mvn archetype:generate \
+-DarchetypeGroupId=org.streampipes -DarchetypeArtifactId=streampipes-archetype-pe-sources \
+-DarchetypeVersion=0.64.0 -DgroupId=my.groupId \
+-DartifactId=my-source -DclassNamePrefix=MySource -DpackageName=mypackagename
+```
+
+Configure the variables ``artifactId`` (which will be the Maven artifactId), ``classNamePrefix`` (which will be the class name of your data stream) and ``packageName``.
+
+For this tutorial, use ``Vehicle`` as ``classNamePrefix``.
+
+Your project will look as follows:
+
+<img src="/img/tutorial-sources/project-structure.PNG" alt="Project Structure">
+
+That's it, go to the next section to learn how to create your first data stream!
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>Besides the basic project skeleton, the sample project also includes an example Dockerfile you can use to package your application into a Docker container.
+</p>
+</div>
+
+## Adding a data stream description
+
+Now we will add a new data stream definition.
+First, open the class `VehicleStream` which should look as follows:
+
+```java
+
+package my.groupId.pe.mypackagename;
+
+import org.streampipes.model.SpDataStream;
+import org.streampipes.model.graph.DataSourceDescription;
+import org.streampipes.sdk.builder.DataStreamBuilder;
+import org.streampipes.sdk.helpers.EpProperties;
+import org.streampipes.sdk.helpers.Formats;
+import org.streampipes.sdk.helpers.Protocols;
+import org.streampipes.sources.AbstractAdapterIncludedStream;
+
+
+public class MySourceStream extends AbstractAdapterIncludedStream {
+
+  @Override
+  public SpDataStream declareModel(DataSourceDescription sep) {
+    return DataStreamBuilder.create("my.groupId-mypackagename", "MySource", "")
+            .property(EpProperties.timestampProperty("timestamp"))
+
+            // configure your stream here
+
+            .format(Formats.jsonFormat())
+            .protocol(Protocols.kafka("localhost", 9092, "TOPIC_SHOULD_BE_CHANGED"))
+            .build();
+  }
+
+  @Override
+  public void executeStream() {
+
+  }
+}
+```
+
+This class extends the class ``AbstractAdapterIncludedStream``, which indicates that this source continuously produces data (configured in the ``executeStream()`` method).
+In contrast, the class `AbstractAlreadyExistingStream` indicates that we only want to describe an already existing stream (e.g., a stream that already sends data to an existing Kafka broker).
+
+Now we will add the definition of the data stream. Add the following code inside of the `declareModel` method:
+```java
+return DataStreamBuilder.create("org.streampipes.tutorial.vehicle.position", "Vehicle Position", "An event stream " +
+          "that produces current vehicle positions")
+```
+
+This line creates a new instance of the SDK's `DataStreamBuilder` by providing three basic parameters:
+The first parameter must be a unique identifier of your data stream.
+The second and third parameters indicate a label and a description of your stream.
+These values will later be used in the StreamPipes UI to display stream details in a human-readable manner.
+
+Next, we will add the properties as stated above to the stream definition by adding the following lines:
+```java
+.property(EpProperties.timestampProperty("timestamp"))
+.property(EpProperties.stringEp(Labels.from("plate-number", "Plate Number", "Denotes the plate number of the vehicle"), "plateNumber", "http://my.company/plateNumber"))
+.property(EpProperties.doubleEp(Labels.from("latitude", "Latitude", "Denotes the latitude value of the vehicle's position"), "latitude", Geo.lat))
+.property(EpProperties.doubleEp(Labels.from("longitude", "Longitude", "Denotes the longitude value of the vehicle's position"), "longitude", Geo.lng))
+```
+These four _event properties_ compose our _event schema_. An event property must, at least, provide the following attributes:
+
+* **Runtime Name**. The runtime name indicates the key of the property at runtime, e.g., if our JSON message contains a structure such as `{"plateNumber" : "KA-F 123"}`, the runtime name must be `plateNumber`.
+* **Runtime Type**. An event property must have a primitive type (we will later see how to model more complex properties such as lists and nested properties).
+The type must be an instance of `XMLSchema` primitives, however, the SDK provides convenience methods to provide the property type.
+* **Domain Property**. The domain property indicates the semantics of the event property. For instance, the `latitude` property is linked to the `http://www.w3.org/2003/01/geo/wgs84_pos#lat` property of the WGS84 vocabulary.
+The domain property should be a URI as part of an existing or domain-specific vocabulary. The SDK provides convenience methods for popular vocabularies (e.g., Schema.org, Dolce or WGS84).
+
+In order to complete the minimum required specification of an event stream, we need to provide information on the transport format and protocol of the data stream at runtime.
+
+This can be achieved by extending the builder with the respective properties (which should already have been auto-generated):
+```java
+.format(Formats.jsonFormat())
+.protocol(Protocols.kafka("localhost", 9092, "TOPIC_SHOULD_BE_CHANGED"))
+.build();
+```
+
+Set ``org.streampipes.tutorial.vehicle`` as your new topic by replacing the term ``TOPIC_SHOULD_BE_CHANGED``.
+
+In this example, we defined that the data stream consists of events in a JSON format and that Kafka is used as a message broker to transmit events.
+The last build() method call triggers the construction of the RDF-based data stream definition.
+
+That's it! In the next section, we will connect the data stream to a source and inspect the generated RDF description.
+
+## Creating some dummy data
+
+Let's assume our stream should produce some random values that are sent to StreamPipes. We'll add a very simple data simulator to the ``executeStream`` method as follows:
+
+```java
+@Override
+  public void executeStream() {
+
+    SpKafkaProducer producer = new SpKafkaProducer("localhost:9092", "TOPIC_SHOULD_BE_CHANGED");
+    Random random = new Random();
+    Runnable runnable = new Runnable() {
+      @Override
+      public void run() {
+        for (;;) {
+          JsonObject jsonObject = new JsonObject();
+          jsonObject.addProperty("timestamp", System.currentTimeMillis());
+          jsonObject.addProperty("plateNumber", "KA-FZ 1");
+          jsonObject.addProperty("latitude", random.nextDouble());
+          jsonObject.addProperty("longitude", random.nextDouble());
+
+          producer.publish(jsonObject.toString());
+
+          try {
+            Thread.sleep(1000);
+          } catch (InterruptedException e) {
+            e.printStackTrace();
+          }
+
+        }
+      }
+    };
+
+    new Thread(runnable).start();
+  }
+```
+
+Change the topic and the URL of your Kafka broker (as stated in the controller).
+
+## Adding a source description
+
+A data source can be seen like a container for a set of data streams. Usually, a data source includes events that are logically or physically connected.
+For instance, in our example we would add other streams produced by vehicle sensors (such as fuel consumption) to the same data source description.
+
+Open the class `DataSource` which should look as follows:
+```java
+
+package my.groupId.pe.mypackagename;
+
+import org.streampipes.container.declarer.DataStreamDeclarer;
+import org.streampipes.container.declarer.SemanticEventProducerDeclarer;
+import org.streampipes.model.graph.DataSourceDescription;
+import org.streampipes.sdk.builder.DataSourceBuilder;
+
+import java.util.Arrays;
+import java.util.List;
+
+
+public class DataSource implements SemanticEventProducerDeclarer {
+
+  public DataSourceDescription declareModel() {
+    return DataSourceBuilder.create("my.groupId.mypackagename.source", "MySource " +
+        "Source", "")
+            .build();
+  }
+
+  public List<DataStreamDeclarer> getEventStreams() {
+    return Arrays.asList(new MySourceStream());
+  }
+}
+```
+First, we need to define the source. Similar to data streams, a source consists of an id, a human-readable name and a description.
+Replace the content defined in the `declareModel` method with the following code:
+```java
+return DataSourceBuilder.create("org.streampipes.tutorial.source.vehicle", "Vehicle Source", "A data source that " +
+    "holds event streams produced by vehicles.")
+    .build();
+```
+
+## Preparing the container
+
+The final step is to define the deployment type of our new data source. In this tutorial, we will create a so-called `StandaloneModelSubmitter`.
+This client will start an embedded web server that provides the description of our data source.
+
+Go to the class `Init` that implements `StandaloneModelSubmitter`, which should look as follows:
+```java
+package my.groupId.main;
+
+import org.streampipes.container.init.DeclarersSingleton;
+import org.streampipes.container.standalone.init.StandaloneModelSubmitter;
+import my.groupId.config.Config;
+import my.groupId.pe.mypackagename.DataSource;
+
+public class Init extends StandaloneModelSubmitter {
+
+  public static void main(String[] args) throws Exception {
+    DeclarersSingleton.getInstance()
+            .add(new DataSource());
+
+    new Init().init(Config.INSTANCE);
+
+  }
+}
+```
+This code adds the `VehicleSource`. Finally, the `init` method is called
+which triggers the generation of the corresponding RDF description and startup of the web server.
+
+<div class="admonition info">
+<div class="admonition-title">Info</div>
+<p>In the example above, we make use of a class `Config`.
+       This class contains both mandatory and additional configuration parameters required by a pipeline element container.
+       These values are stored in the Consul-based key-value store of your StreamPipes installation.
+       The SDK guide contains a detailed manual on managing container configurations.</p>
+</div>
+
+## Starting the container
+
+<div class="admonition tip">
+<div class="admonition-title">Tip</div>
+<p>By default, the container registers itself using the hostname later used by the Docker container, leading to a 404 error when you try to access an RDF description.
+       For local development, we provide an environment file in the ``development`` folder. You can add your hostname here, which will override settings from the Config class.
+       For instance, use the IntelliJ ``EnvFile`` plugin to automatically provide the environment variables upon start.
+</p>
+</div>
+
+Now we are ready to start our first container!
+
+Execute the main method in the class `Main` we've just created, open a web browser and navigate to http://localhost:8090, or change the port according to the value of the ``SP_PORT`` variable in the env file.
+
+You should see something as follows:
+
+<img src="/img/tutorial-sources/pe-overview.PNG" alt="Pipeline Element Container Overview">
+
+Click on the link of the data source to see the RDF description of the pipeline element.
+
+<img src="/img/tutorial-sources/pe-rdf.PNG" alt="Pipeline Element RDF description">
+
+The container automatically registers itself in the Consul installation of StreamPipes.
+To install the just created element, open the StreamPipes UI and follow the manual provided in the [user guide](user-guide-introduction).
+
+## Read more
+
+Congratulations! You've just created your first pipeline element for StreamPipes.
+There are many more things to explore and data sources can be defined in much more detail.
+Follow our [SDK guide](dev-guide-source-sdk) to see what's possible!
\ No newline at end of file
diff --git a/documentation/docs/faq-common-problems.md b/documentation/docs/faq-common-problems.md
new file mode 100644
index 0000000..14195c0
--- /dev/null
+++ b/documentation/docs/faq-common-problems.md
@@ -0,0 +1,73 @@
+---
+id: faq-common-problems
+title: Common Problems
+sidebar_label: Common Problems
+---
+
+* Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
+* Linux / OSX: Consul does not start
+* Run StreamPipes in a VM in Windows
+* Only few processors are available in the pipeline editor
+* No data is shown in the live dashboard
+* Windows 10: Should I use Windows containers or Docker containers?
+* Configurations are not deleted
+* Help us to improve StreamPipes and this documentation
+* Docker Network already used
+
+## Windows 10: Consul, Kafka, Zookeeper, or Kafka-Rest did not start
+**Problem:** You get an error message similar to: `ERROR: for consul  Cannot start service consul: b'driver failed programming external connectivity on endpoint sp-test_consul_1 (eae0457fc03c1364b8e81a6e155ca4b95ee1e1d01bb3c1aa9dd5192bdcb7b91a): Error starting userland proxy: mkdir /port/tcp:0.0.0.0:8600:tcp:172.30.0.9:8600: input/output error`
+
+**Solution:** To resolve this problem, stop StreamPipes with `streampipes stop` and restart Docker via the Docker settings in the task bar.
+After Docker was restarted, run `streampipes start`.
+
+## Consul does not start
+**Problem:** After starting StreamPipes with `streampipes start`, there is an error with Consul:
+
+**Solution:** To resolve this, execute `streampipes stop`, wait a minute and start it again with `streampipes start`. If you've installed an old version of StreamPipes (before the installer was available), make sure that no network suffixed with `spnet` exists in Docker. Type `docker network ls` to check and `docker network rm NETWORK_NAME` to remove the existing network before running the installer.
+
+## Run StreamPipes in a VM in Windows
+**Problem:** StreamPipes does not work properly with Docker under Windows 8 or earlier versions.
+
+**Solution:** We do support virtual machines (VMs), but if you run them under Windows, there might be problems with docker and its network configurations.
+Please use Windows 10, OSX or Linux.
+You can also use a VM from a cloud provider to test StreamPipes.
+
+
+## Only few processors are available in the pipeline editor
+**Problem:** In the Pipeline Editor, only a few processors can be used in pipelines.
+
+**Solution:** In the demo/desktop version, we only integrated a few processors. To ensure that you can easily try out StreamPipes (even on your laptop),
+ we tried to make it as lightweight as possible. If you are interested in more sophisticated algorithms, please contact us.
+
+
+## No data is shown in the live dashboard
+**Problem:** The live dashboard does not show any data.
+
+**Solution:** If this is the case, your IP is probably configured wrong.
+You can reinstall the system by running `streampipes clean` and then `streampipes start` again.
+This will delete all StreamPipes configurations. StreamPipes is designed as a server application and requires a fixed IP.
+We created a version to easily run it on your laptop and test it, but on your laptop you usually get a new IP when you change the network.
+This problem only occurs in testing scenarios, in production scenarios the IP can also be changed manually without data loss.
+
+## Windows 10: Should I use Windows containers or Docker containers
+**Problem:** StreamPipes does not work with Windows 10.
+
+**Solution:** You should use Docker containers. Go to the Docker settings in your task bar and select 'Switch to Docker containers'.
+
+## Configurations are not deleted
+**Problem:** The configurations are not deleted from the host system. Even after manually removing the 'config/' folder, StreamPipes settings are not deleted.
+Also the Consul settings are still there.
+
+**Solution:** Probably Docker did not mount a volume in the 'config/' folder. You must delete the anonymous docker volumes manually. See in docker [documentation](https://docs.docker.com/engine/reference/commandline/volume_rm/).
+
+
+## Docker Network already used
+**Problem:** When starting StreamPipes the error message: "Creating network 'streampipes-cli_spnet' with driver 'bridge' Pool overlaps with other one on this address space" is shown.
+
+**Solution:** Delete old networks for example with "docker network prune".
+
+## Help us to improve StreamPipes and this documentation
+Help us to improve this section.
+If you have any problems with the system or with the documentation, do not hesitate to contact us.
+Our goal is to continuously improve StreamPipes.
+Your help and feedback is welcome.
diff --git a/documentation/docs/license.md b/documentation/docs/license.md
new file mode 100644
index 0000000..e69de29
diff --git a/documentation/docs/org.streampipes.processor.geo.flink/documentation.md b/documentation/docs/org.streampipes.processor.geo.flink/documentation.md
new file mode 100644
index 0000000..c0b2e07
--- /dev/null
+++ b/documentation/docs/org.streampipes.processor.geo.flink/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processor.geo.flink
+title: Spatial Grid Enrichment
+sidebar_label: Spatial Grid Enrichment
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processor.geo.flink/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Groups spatial events into cells of a given size
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processor.imageclassification.jvm.generic-image-classification/documentation.md b/documentation/docs/org.streampipes.processor.imageclassification.jvm.generic-image-classification/documentation.md
new file mode 100644
index 0000000..8c1ce2b
--- /dev/null
+++ b/documentation/docs/org.streampipes.processor.imageclassification.jvm.generic-image-classification/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processor.imageclassification.jvm.generic-image-classification
+title: Generic Image Classification
+sidebar_label: Generic Image Classification
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processor.imageclassification.jvm.generic-image-classification/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Image Classification Description (Generic Model)
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processor.imageclassification.jvm.image-enricher/documentation.md b/documentation/docs/org.streampipes.processor.imageclassification.jvm.image-enricher/documentation.md
new file mode 100644
index 0000000..b9444b8
--- /dev/null
+++ b/documentation/docs/org.streampipes.processor.imageclassification.jvm.image-enricher/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processor.imageclassification.jvm.image-enricher
+title: Image Enricher
+sidebar_label: Image Enricher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processor.imageclassification.jvm.image-enricher/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Image Enrichment: Enriches an image with given bounding box coordinates
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processor.imageclassification.qrcode/documentation.md b/documentation/docs/org.streampipes.processor.imageclassification.qrcode/documentation.md
new file mode 100644
index 0000000..89ed2b0
--- /dev/null
+++ b/documentation/docs/org.streampipes.processor.imageclassification.qrcode/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processor.imageclassification.qrcode
+title: QR Code Reader
+sidebar_label: QR Code Reader
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processor.imageclassification.qrcode/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+QR Code Reader: Detects a QR Code in an image
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.aggregation.flink.aggregation/documentation.md b/documentation/docs/org.streampipes.processors.aggregation.flink.aggregation/documentation.md
new file mode 100644
index 0000000..315437f
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.aggregation.flink.aggregation/documentation.md
@@ -0,0 +1,43 @@
+---
+id: org.streampipes.processors.aggregation.flink.aggregation
+title: Aggregation
+sidebar_label: Aggregation
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.aggregation.flink.aggregation/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Performs different aggregation functions based on a sliding time window (e.g., average, sum, min, max)
+
+***
+
+## Required input
+
+The aggregation processor requires a data stream that has at least one field containing a numerical value.
+
+***
+
+## Configuration
+
+### Group by
+The aggregation function can be calculated separately (partitioned) by the selected field value.
+
+### Output every
+The frequency in which aggregated values are sent in seconds.
+
+### Time window
+The size of the time window in seconds
+
+### Aggregated Value
+The field used for calculating the aggregation value.
+
+## Output
+
+This processor appends the latest aggregated value to every input event that arrives.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.aggregation.flink.rate/documentation.md b/documentation/docs/org.streampipes.processors.aggregation.flink.rate/documentation.md
new file mode 100644
index 0000000..877b97c
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.aggregation.flink.rate/documentation.md
@@ -0,0 +1,32 @@
+---
+id: org.streampipes.processors.aggregation.flink.rate
+title: Event Rate
+sidebar_label: Event Rate
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.aggregation.flink.rate/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Computes the current event rate. Output is a number representing events per second.
+
+***
+
+## Required input
+
+The event rate processor works with any stream and does not have any specific requirements.
+
+***
+
+## Configuration
+
+### Time Baseline
+Time window size used for calculating the rate in seconds, also defines the output rate
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.mathop/documentation.md b/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.mathop/documentation.md
new file mode 100644
index 0000000..c02b9b5
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.mathop/documentation.md
@@ -0,0 +1,38 @@
+---
+id: org.streampipes.processors.enricher.flink.processor.math.mathop
+title: Math
+sidebar_label: Math
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.enricher.flink.processor.math.mathop/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Performs calculations on event properties (+, -, *, /, %).
+
+***
+
+## Required input
+The math processor works with any event that has at least one field containing a numerical value.
+
+***
+
+## Configuration
+
+### Left operand
+The field from the input event that should be used as the left operand.
+
+### Right operand
+The field from the input event that should be used as the right operand.
+
+### Operation
+The math operation that should be performed.
+
+## Output
+The processor appends the calculation result to each input event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.staticmathop/documentation.md b/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.staticmathop/documentation.md
new file mode 100644
index 0000000..62851c1
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.enricher.flink.processor.math.staticmathop/documentation.md
@@ -0,0 +1,38 @@
+---
+id: org.streampipes.processors.enricher.flink.processor.math.staticmathop
+title: Static Math
+sidebar_label: Static Math
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.enricher.flink.processor.math.staticmathop/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Performs calculation on an event property with a static value (+, -, *, /, %).
+
+***
+
+## Required input
+The math processor works with any event that has at least one field containing a numerical value.
+
+***
+
+## Configuration
+
+### Left operand
+The field from the input event that should be used as the left operand.
+
+### Right operand value
+Specify the value of the right operand.
+
+### Operation
+The math operation that should be performed.
+
+## Output
+The processor appends the calculation result to each input event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.enricher.flink.processor.trigonometry/documentation.md b/documentation/docs/org.streampipes.processors.enricher.flink.processor.trigonometry/documentation.md
new file mode 100644
index 0000000..e52031e
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.enricher.flink.processor.trigonometry/documentation.md
@@ -0,0 +1,38 @@
+---
+id: org.streampipes.processors.enricher.flink.processor.trigonometry
+title: Trigonometry Functions
+sidebar_label: Trigonometry Functions
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.enricher.flink.processor.trigonometry/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Performs Trigonometric functions (sin, cos, tan) on event properties.
+
+***
+
+## Required input
+The trigonometry processor works with any event that has at least one field containing a numerical value.
+
+***
+
+## Configuration
+
+
+### Alpha
+The field that should be used for calculating the trigonometric function.
+
+
+### Operation
+The trigonometric function that should be calculated.
+
+## Output
+The processor appends the calculation result to each input event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.enricher.flink.processor.urldereferencing/documentation.md b/documentation/docs/org.streampipes.processors.enricher.flink.processor.urldereferencing/documentation.md
new file mode 100644
index 0000000..112fbd9
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.enricher.flink.processor.urldereferencing/documentation.md
@@ -0,0 +1,33 @@
+---
+id: org.streampipes.processors.enricher.flink.processor.urldereferencing
+title: URL Dereferencing
+sidebar_label: URL Dereferencing
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.enricher.flink.processor.urldereferencing/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Fetches the HTML page behind a URL and appends it as a string to the event.
+
+***
+
+## Required input
+The URL Dereferencing processor requires an input stream that provides an input field of type 'string', representing 
+the URL to dereference.
+
+***
+
+## Configuration
+
+### URL
+The field containing the URL to dereference.
+
+## Output
+The processor appends the extracted HTML page to each input event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.enricher.flink.timestamp/documentation.md b/documentation/docs/org.streampipes.processors.enricher.flink.timestamp/documentation.md
new file mode 100644
index 0000000..131c445
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.enricher.flink.timestamp/documentation.md
@@ -0,0 +1,30 @@
+---
+id: org.streampipes.processors.enricher.flink.timestamp
+title: Timestamp Enricher
+sidebar_label: Timestamp Enricher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.enricher.flink.timestamp/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+Appends the current time in ms to the event payload.
+
+***
+
+## Required input
+The timestamp enricher works with any input event.
+
+***
+
+## Configuration
+
+(no further configuration required)
+
+## Output
+This processor appends the current system time to every input event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.filters.jvm.compose/documentation.md b/documentation/docs/org.streampipes.processors.filters.jvm.compose/documentation.md
new file mode 100644
index 0000000..cb02afc
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.filters.jvm.compose/documentation.md
@@ -0,0 +1,32 @@
+---
+id: org.streampipes.processors.filters.jvm.compose
+title: Compose
+sidebar_label: Compose
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.filters.jvm.compose/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Merges two event streams. Any time a new input event arrives, it is merged with the last input event from the other 
+event stream and forwarded.
+
+***
+
+## Required input
+The Compose processor does not have any specific input requirements.
+
+***
+
+## Configuration
+
+(no further configuration required)
+
+## Output
+The compose processor has a configurable output that can be selected by the user at pipeline modeling time.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.filters.jvm.numericalfilter/documentation.md b/documentation/docs/org.streampipes.processors.filters.jvm.numericalfilter/documentation.md
new file mode 100644
index 0000000..28599b2
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.filters.jvm.numericalfilter/documentation.md
@@ -0,0 +1,38 @@
+---
+id: org.streampipes.processors.filters.jvm.numericalfilter
+title: Numerical Filter
+sidebar_label: Numerical Filter
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.filters.jvm.numericalfilter/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+The Numerical Filter processor filters numerical values based on a given threshold.
+
+***
+
+## Required input
+The processor works with any input event that has one field containing a numerical value.
+
+***
+
+## Configuration
+
+### Field
+Specifies the field name where the filter operation should be applied on.
+
+
+### Operation
+Specifies the filter operation that should be applied on the field.
+
+### Threshold value
+Specifies the threshold value.
+
+## Output
+The processor outputs the input event if it satisfies the filter expression.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.filters.jvm.project/documentation.md b/documentation/docs/org.streampipes.processors.filters.jvm.project/documentation.md
new file mode 100644
index 0000000..3ea631a
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.filters.jvm.project/documentation.md
@@ -0,0 +1,30 @@
+---
+id: org.streampipes.processors.filters.jvm.project
+title: Projection
+sidebar_label: Projection
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.filters.jvm.project/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+Outputs a selectable subset of an input event type.
+
+***
+
+## Required input
+The project processor works with any input event stream.
+
+***
+
+## Configuration
+
+(no further configuration required)
+
+## Output
+The output depends on the fields selected at pipeline development time.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.filters.jvm.textfilter/documentation.md b/documentation/docs/org.streampipes.processors.filters.jvm.textfilter/documentation.md
new file mode 100644
index 0000000..1a0e6b1
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.filters.jvm.textfilter/documentation.md
@@ -0,0 +1,35 @@
+---
+id: org.streampipes.processors.filters.jvm.textfilter
+title: Text Filter
+sidebar_label: Text Filter
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.filters.jvm.textfilter/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+The Text Filter processor filters text values based on a given string.
+
+***
+
+## Required input
+The processor works with any input event that has one field containing a text.
+
+***
+
+## Configuration
+
+### Text Field
+The field containing the text that should be filtered.
+
+
+### Operation
+The operation used by the filter processor (equals or matches).
+
+## Output
+The processor outputs the input event if it satisfies the filter expression.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.pattern-detection.flink.peak-detection/documentation.md b/documentation/docs/org.streampipes.processors.pattern-detection.flink.peak-detection/documentation.md
new file mode 100644
index 0000000..fb147af
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.pattern-detection.flink.peak-detection/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.pattern-detection.flink.peak-detection
+title: Peak Detection
+sidebar_label: Peak Detection
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.pattern-detection.flink.peak-detection/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Detects peaks in time series data.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.frequency/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.frequency/documentation.md
new file mode 100644
index 0000000..a95c372
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.frequency/documentation.md
@@ -0,0 +1,33 @@
+---
+id: org.streampipes.processors.siddhi.frequency
+title: Frequency Calculation
+sidebar_label: Frequency Calculation
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.frequency/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Calculates the frequency of the event stream.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.frequencychange/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.frequencychange/documentation.md
new file mode 100644
index 0000000..a552e4b
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.frequencychange/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.siddhi.frequencychange
+title: Frequency Change
+sidebar_label: Frequency Change
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.frequencychange/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Detects when the frequency of the event stream changes.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.increase/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.increase/documentation.md
new file mode 100644
index 0000000..84a550c
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.increase/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.siddhi.increase
+title: Trend
+sidebar_label: Trend
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.increase/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Detects the increase of a numerical field over a customizable time window. Example: a temperature value increases by 
+10 percent within 5 minutes.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.numericalfilter/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.numericalfilter/documentation.md
new file mode 100644
index 0000000..25d29e7
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.numericalfilter/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.siddhi.numericalfilter
+title: Numerical Filter (Siddhi)
+sidebar_label: Numerical Filter (Siddhi)
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.numericalfilter/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.sequence/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.sequence/documentation.md
new file mode 100644
index 0000000..dd25bea
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.sequence/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.siddhi.sequence
+title: Sequence Detection
+sidebar_label: Sequence Detection
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.sequence/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Merges events from two event streams when the top event arrives first and then the bottom event.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.siddhi.stop/documentation.md b/documentation/docs/org.streampipes.processors.siddhi.stop/documentation.md
new file mode 100644
index 0000000..c419596
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.siddhi.stop/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.siddhi.stop
+title: Stream Stop Detection
+sidebar_label: Stream Stop Detection
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.siddhi.stop/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Triggers an event when the input data stream stops sending events.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.textmining.flink.languagedetection/documentation.md b/documentation/docs/org.streampipes.processors.textmining.flink.languagedetection/documentation.md
new file mode 100644
index 0000000..51cddc8
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.textmining.flink.languagedetection/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.textmining.flink.languagedetection
+title: Language Detection
+sidebar_label: Language Detection
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.textmining.flink.languagedetection/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Detects the language of a written text.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.textmining.flink.wordcount/documentation.md b/documentation/docs/org.streampipes.processors.textmining.flink.wordcount/documentation.md
new file mode 100644
index 0000000..a12f575
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.textmining.flink.wordcount/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.textmining.flink.wordcount
+title: Word Count
+sidebar_label: Word Count
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.textmining.flink.wordcount/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Counts words on continuous text-based streams.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.field-converter/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.field-converter/documentation.md
new file mode 100644
index 0000000..3146b6f
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.field-converter/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.field-converter
+title: Field Converter
+sidebar_label: Field Converter
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.field-converter/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Converts a string value to a number data type.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.field-mapper/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.field-mapper/documentation.md
new file mode 100644
index 0000000..3bd914d
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.field-mapper/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.field-mapper
+title: Field Mapper
+sidebar_label: Field Mapper
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.field-mapper/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Replaces one or more fields with a new field and computes a hash value of these fields.
+Add a detailed description here
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.field-renamer/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.field-renamer/documentation.md
new file mode 100644
index 0000000..a04bf3a
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.field-renamer/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.field-renamer
+title: Field Renamer
+sidebar_label: Field Renamer
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.field-renamer/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Replaces the runtime name of an event property with a custom defined name. Useful for data ingestion purposes where a 
+specific event schema is needed.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.fieldhasher/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.fieldhasher/documentation.md
new file mode 100644
index 0000000..7ece96e
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.fieldhasher/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.fieldhasher
+title: Field Hasher
+sidebar_label: Field Hasher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.fieldhasher/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+The Field Hasher uses an algorithm to encode values in a field. The Field Hasher can use MD5, SHA1 or SHA2 to hash 
+field values.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.measurement-unit-converter/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.measurement-unit-converter/documentation.md
new file mode 100644
index 0000000..120f481
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.measurement-unit-converter/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.measurement-unit-converter
+title: Measurement Unit Converter
+sidebar_label: Measurement Unit Converter
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.measurement-unit-converter/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Converts a unit of measurement to another one.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.flink.processor.boilerplate/documentation.md b/documentation/docs/org.streampipes.processors.transformation.flink.processor.boilerplate/documentation.md
new file mode 100644
index 0000000..3e13244
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.flink.processor.boilerplate/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.flink.processor.boilerplate
+title: Boilerplate Removal
+sidebar_label: Boilerplate Removal
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.flink.processor.boilerplate/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Removes boilerplate content and extracts the fulltext from HTML documents.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.jvm.changed-value/documentation.md b/documentation/docs/org.streampipes.processors.transformation.jvm.changed-value/documentation.md
new file mode 100644
index 0000000..b871bc8
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.jvm.changed-value/documentation.md
@@ -0,0 +1,35 @@
+---
+id: org.streampipes.processors.transformation.jvm.changed-value
+title: Value Changed
+sidebar_label: Value Changed
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.jvm.changed-value/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This processor sends out an event every time a specific object changes. It also adds a timestamp in ms from the system time.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.jvm.count-array/documentation.md b/documentation/docs/org.streampipes.processors.transformation.jvm.count-array/documentation.md
new file mode 100644
index 0000000..b93f05f
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.jvm.count-array/documentation.md
@@ -0,0 +1,37 @@
+---
+id: org.streampipes.processors.transformation.jvm.count-array
+title: Count Array
+sidebar_label: Count Array
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.jvm.count-array/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This processor takes a list field, computes the size of the list and appends the result to the event.
+
+***
+
+## Required input
+
+This processor works with any event that has a field of type ``list``.
+
+***
+
+## Configuration
+
+
+### List Field
+
+The field containing the list that should be used.
+
+## Output
+
+Outputs the incoming event while appending the list size (named ``countValue``) to the incoming event.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.jvm.duration-value/documentation.md b/documentation/docs/org.streampipes.processors.transformation.jvm.duration-value/documentation.md
new file mode 100644
index 0000000..7c6c126
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.jvm.duration-value/documentation.md
@@ -0,0 +1,36 @@
+---
+id: org.streampipes.processors.transformation.jvm.duration-value
+title: Calculate Duration
+sidebar_label: Calculate Duration
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.jvm.duration-value/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This processor calculates the duration for a given stream with a start timestamp and an end timestamp.
+
+***
+
+## Required input
+
+
+***
+
+## Configuration
+
+Describe the configuration parameters here
+
+### 1st parameter
+
+
+### 2nd parameter
+
+## Output
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.jvm.processor.timestampextractor/documentation.md b/documentation/docs/org.streampipes.processors.transformation.jvm.processor.timestampextractor/documentation.md
new file mode 100644
index 0000000..a3ad750
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.jvm.processor.timestampextractor/documentation.md
@@ -0,0 +1,40 @@
+---
+id: org.streampipes.processors.transformation.jvm.processor.timestampextractor
+title: Timestamp Extractor
+sidebar_label: Timestamp Extractor
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.jvm.processor.timestampextractor/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This processor extracts a timestamp into the individual time fields (e.g. day field, hour field, ....)
+
+***
+
+## Required input
+
+This processor requires an event that provides a timestamp value (a field that is marked to be of type 
+``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+### Timestamp Field
+
+The field of the event containing the timestamp to parse.
+
+### Extract Fields
+
+Select the individual parts of the timestamp that should be extracted, e.g., Year, Minute and Day.
+
+## Output
+
+The output of this processor is a new event that contains the fields selected by the ``Extract Fields`` parameter.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.processors.transformation.jvm.split-array/documentation.md b/documentation/docs/org.streampipes.processors.transformation.jvm.split-array/documentation.md
new file mode 100644
index 0000000..5ce4fe5
--- /dev/null
+++ b/documentation/docs/org.streampipes.processors.transformation.jvm.split-array/documentation.md
@@ -0,0 +1,42 @@
+---
+id: org.streampipes.processors.transformation.jvm.split-array
+title: Split Array
+sidebar_label: Split Array
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.processors.transformation.jvm.split-array/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This processor takes an array of event properties and creates a separate event for each of them. Further properties 
+of the incoming event can be added to each resulting event.
+
+***
+
+## Required input
+
+This processor works with any event that has a field of type ``list``.
+
+***
+
+## Configuration
+
+### Keep Fields
+
+Fields of the event that should be kept in each resulting event.
+
+### List field
+
+The name of the field that contains the list values that should be split.
+
+
+## Output
+
+This data processor produces an event with all fields selected by the ``Keep Fields`` parameter and all fields of 
+the selected list field.
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.brokers.jvm.jms/documentation.md b/documentation/docs/org.streampipes.sinks.brokers.jvm.jms/documentation.md
new file mode 100644
index 0000000..1421f20
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.brokers.jvm.jms/documentation.md
@@ -0,0 +1,42 @@
+---
+id: org.streampipes.sinks.brokers.jvm.jms
+title: JMS Publisher
+sidebar_label: JMS Publisher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.brokers.jvm.jms/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Publishes events to a message broker (e.g., ActiveMQ) using the Java Message Service (JMS) protocol.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### JMS Broker Settings
+
+The basic settings to connect to the broker. 
+The JMS broker URL indicates the URL of the broker (e.g., tcp://localhost), the port indicates the port of the broker
+ (e.g., 61616)
+
+
+### JMS Topic
+
+The topic where events should be sent to.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.brokers.jvm.kafka/documentation.md b/documentation/docs/org.streampipes.sinks.brokers.jvm.kafka/documentation.md
new file mode 100644
index 0000000..8ffd9e3
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.brokers.jvm.kafka/documentation.md
@@ -0,0 +1,43 @@
+---
+id: org.streampipes.sinks.brokers.jvm.kafka
+title: Kafka Publisher
+sidebar_label: Kafka Publisher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.brokers.jvm.kafka/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Publishes events to Apache Kafka.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Kafka Broker Settings
+
+The basic settings to connect to the broker. 
+The Kafka broker URL indicates the URL of the broker (e.g., localhost), the port indicates the port of the broker
+ (e.g., 9092)
+
+
+### Kafka Topic
+
+The topic where events should be sent to.
+
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.brokers.jvm.rabbitmq/documentation.md b/documentation/docs/org.streampipes.sinks.brokers.jvm.rabbitmq/documentation.md
new file mode 100644
index 0000000..4a745c3
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.brokers.jvm.rabbitmq/documentation.md
@@ -0,0 +1,56 @@
+---
+id: org.streampipes.sinks.brokers.jvm.rabbitmq
+title: RabbitMQ Publisher
+sidebar_label: RabbitMQ Publisher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.brokers.jvm.rabbitmq/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Forwards events to a RabbitMQ broker.
+Events are published to the exchange and topic configured below.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Host
+
+The hostname of the RabbitMQ broker.
+
+### Port
+
+The port of the RabbitMQ broker.
+
+### User
+
+The username used to connect to the RabbitMQ broker.
+
+### Password
+
+The password used to connect to the RabbitMQ broker.
+
+### Exchange Name
+
+The name of the exchange.
+
+### RabbitMQ Topic
+
+The topic where events should be sent to.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.brokers.jvm.rest/documentation.md b/documentation/docs/org.streampipes.sinks.brokers.jvm.rest/documentation.md
new file mode 100644
index 0000000..b517203
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.brokers.jvm.rest/documentation.md
@@ -0,0 +1,35 @@
+---
+id: org.streampipes.sinks.brokers.jvm.rest
+title: REST Publisher
+sidebar_label: REST Publisher
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.brokers.jvm.rest/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Posts a JSON representation of an event to a REST interface.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### REST URL
+
+The complete URL of the REST endpoint.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.databases.flink.elasticsearch/documentation.md b/documentation/docs/org.streampipes.sinks.databases.flink.elasticsearch/documentation.md
new file mode 100644
index 0000000..ef0ce61
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.databases.flink.elasticsearch/documentation.md
@@ -0,0 +1,42 @@
+---
+id: org.streampipes.sinks.databases.flink.elasticsearch
+title: Elasticsearch
+sidebar_label: Elasticsearch
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.databases.flink.elasticsearch/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Stores data in an Elasticsearch database.
+
+***
+
+## Required input
+
+This sink requires an event that provides a timestamp value (a field that is marked to be of type
+``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+The following configuration parameters are required:
+
+### Timestamp Field
+
+The field which contains the required timestamp.
+
+### Index Name
+
+The name of the Elasticsearch index where events are stored to.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.databases.jvm.couchdb/documentation.md b/documentation/docs/org.streampipes.sinks.databases.jvm.couchdb/documentation.md
new file mode 100644
index 0000000..145949b
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.databases.jvm.couchdb/documentation.md
@@ -0,0 +1,45 @@
+---
+id: org.streampipes.sinks.databases.jvm.couchdb
+title: CouchDB
+sidebar_label: CouchDB
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.databases.jvm.couchdb/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Stores events in an Apache CouchDB database.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+The following configuration parameters are required:
+
+### Hostname
+
+The hostname of the CouchDB instance.
+
+### Port
+
+The port of the CouchDB instance.
+
+### Database Name
+
+The name of the database where events will be stored
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.databases.jvm.influxdb/documentation.md b/documentation/docs/org.streampipes.sinks.databases.jvm.influxdb/documentation.md
new file mode 100644
index 0000000..8c59fef
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.databases.jvm.influxdb/documentation.md
@@ -0,0 +1,67 @@
+---
+id: org.streampipes.sinks.databases.jvm.influxdb
+title: InfluxDB
+sidebar_label: InfluxDB
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.databases.jvm.influxdb/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Stores events in an InfluxDB.
+
+***
+
+## Required input
+
+This sink requires an event that provides a timestamp value (a field that is marked to be of type
+``http://schema.org/DateTime``).
+
+***
+
+## Configuration
+
+### Hostname
+
+The hostname/URL of the InfluxDB instance. (Include http(s)://).
+
+### Port
+
+The port of the InfluxDB instance.
+
+### Database Name
+
+The name of the database where events will be stored.
+
+### Measurement Name
+
+The name of the Measurement where events will be stored (will be created if it does not exist).
+
+### Username
+
+The username for the InfluxDB Server.
+
+### Password
+
+The password for the InfluxDB Server.
+
+### Timestamp Field
+
+The field which contains the required timestamp.
+
+### Buffer Size
+
+Indicates how many events are written into a buffer, before they are written to the database.
+
+### Maximum Flush
+
+The maximum waiting time (in ms) for the buffer to reach the Buffer Size before it is written to the database.
+
+## Output
+
+(not applicable for data sinks)
diff --git a/documentation/docs/org.streampipes.sinks.databases.jvm.postgresql/documentation.md b/documentation/docs/org.streampipes.sinks.databases.jvm.postgresql/documentation.md
new file mode 100644
index 0000000..746b455
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.databases.jvm.postgresql/documentation.md
@@ -0,0 +1,55 @@
+---
+id: org.streampipes.sinks.databases.jvm.postgresql
+title: PostgreSQL
+sidebar_label: PostgreSQL
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.databases.jvm.postgresql/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Stores events in a Postgres database.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+### Hostname
+
+The hostname of the PostgreSQL instance.
+
+### Port
+
+The port of the PostgreSQL instance (default 5432).
+
+### Database Name
+
+The name of the database where events will be stored
+
+### Table Name
+
+The name of the table where events will be stored (will be created if it does not exist)
+
+### Username
+
+The username for the PostgreSQL Server.
+
+### Password
+
+The password for the PostgreSQL Server.
+
+## Output
+
+(not applicable for data sinks)
diff --git a/documentation/docs/org.streampipes.sinks.internal.jvm.dashboard/documentation.md b/documentation/docs/org.streampipes.sinks.internal.jvm.dashboard/documentation.md
new file mode 100644
index 0000000..7d1ab57
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.internal.jvm.dashboard/documentation.md
@@ -0,0 +1,34 @@
+---
+id: org.streampipes.sinks.internal.jvm.dashboard
+title: Dashboard Sink
+sidebar_label: Dashboard Sink
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.internal.jvm.dashboard/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This sink visualizes data streams in the StreamPipes dashboard. 
+Visualizations can be configured in the Live Dashboard of StreamPipes after the pipeline has been started.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+No further configuration necessary, individual visualizations can be configured in the Dashboard itself.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.internal.jvm.notification/documentation.md b/documentation/docs/org.streampipes.sinks.internal.jvm.notification/documentation.md
new file mode 100644
index 0000000..c21bd14
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.internal.jvm.notification/documentation.md
@@ -0,0 +1,40 @@
+---
+id: org.streampipes.sinks.internal.jvm.notification
+title: Notification
+sidebar_label: Notification
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.internal.jvm.notification/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Displays a notification in the UI panel of StreamPipes.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+
+### Notification Title
+
+The title of the notification.
+
+### Content
+
+The notification message.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.notifications.jvm.email/documentation.md b/documentation/docs/org.streampipes.sinks.notifications.jvm.email/documentation.md
new file mode 100644
index 0000000..87c64f6
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.notifications.jvm.email/documentation.md
@@ -0,0 +1,49 @@
+---
+id: org.streampipes.sinks.notifications.jvm.email
+title: Email Notification
+sidebar_label: Email Notification
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.notifications.jvm.email/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This sink sends an email to a specified receiver.
+
+Before you use this sink, the settings of your email server need to be configured.
+After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your
+ mail server and credentials.
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+The following configuration is required:
+
+### Receiver Address
+
+The email address of the receiver.
+
+### Subject
+
+The subject of the email.
+
+### Content
+
+The mail text.
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.notifications.jvm.onesignal/documentation.md b/documentation/docs/org.streampipes.sinks.notifications.jvm.onesignal/documentation.md
new file mode 100644
index 0000000..5f16b23
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.notifications.jvm.onesignal/documentation.md
@@ -0,0 +1,45 @@
+---
+id: org.streampipes.sinks.notifications.jvm.onesignal
+title: OneSignal
+sidebar_label: OneSignal
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.notifications.jvm.onesignal/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+This sink sends a push message to the OneSignal application
+
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+The following configuration parameters are required:
+
+### App Id
+
+The OneSignal application ID.
+
+### API Key
+
+The OneSignal API key.
+
+### Content
+
+The message that should be sent to OneSignal
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/org.streampipes.sinks.notifications.jvm.slack/documentation.md b/documentation/docs/org.streampipes.sinks.notifications.jvm.slack/documentation.md
new file mode 100644
index 0000000..f0493d9
--- /dev/null
+++ b/documentation/docs/org.streampipes.sinks.notifications.jvm.slack/documentation.md
@@ -0,0 +1,50 @@
+---
+id: org.streampipes.sinks.notifications.jvm.slack
+title: Slack Notification
+sidebar_label: Slack Notification
+---
+
+
+
+<p align="center"> 
+    <img src="/img/pipeline-elements/org.streampipes.sinks.notifications.jvm.slack/icon.png" width="150px;" class="pe-image-documentation"/>
+</p>
+
+***
+
+## Description
+
+Slack bot to send notifications directly into your slack
+
+Before you use this sink, the Slack token needs to be configured.
+After you've installed the element, navigate to ``Settings``, open the panel ``Sinks Notifications JVM`` and add your
+Slack API token.
+***
+
+## Required input
+
+This sink does not have any requirements and works with any incoming event type.
+
+***
+
+## Configuration
+
+The following configuration parameters are required:
+
+### Receiver
+
+The receiver of the Slack message.
+
+### Channel Type
+
+The channel type, should be "User" or "Channel"
+
+### Content
+
+The message that should be sent.
+
+
+
+## Output
+
+(not applicable for data sinks)
\ No newline at end of file
diff --git a/documentation/docs/pipeline-elements.md b/documentation/docs/pipeline-elements.md
new file mode 100644
index 0000000..3517252
--- /dev/null
+++ b/documentation/docs/pipeline-elements.md
@@ -0,0 +1,7 @@
+---
+id: pipeline-elements
+title: Overview
+sidebar_label: Overview
+---
+
+<div class="pe-grid-container"><div class="pe-container-item pe-container-item-processor"><div class="pe-container-item-header"><div class="pe-container-item-icon pe-icon-processor"><img class="pe-icon" src="/img/pipeline-elements/org.streampipes.processors.aggregation.flink.aggregation/icon.png"></div><div class="pe-container-item-header-pe"><div class="pe-container-item-label pe-container-item-label-processor">Data Processor</div><div class="pe-container-item-label-name">Aggregation</d [...]
\ No newline at end of file
diff --git a/documentation/docs/privacy.md b/documentation/docs/privacy.md
new file mode 100644
index 0000000..1ba41d3
--- /dev/null
+++ b/documentation/docs/privacy.md
@@ -0,0 +1,36 @@
+---
+id: privacy
+title: Datenschutzerklärung
+sidebar_label: Privacy Notice
+---
+
+## Verantwortliche Stelle
+FZI Forschungszentrum Informatik
+Haid-und-Neu-Straße 10-14
+76131 Karlsruhe
+
+E-Mail: datenschutz@fzi.de
+
+## Besuch der Webseite
+Wenn Sie auf unsere Webseite zugreifen werden automatisch folgende Informationen über Sie in Form von Server-Logfiles gespeichert:
+
+* Ihr Webbrowser
+* Ihr Betriebssystem
+* Ihre IP-Adresse und Port
+* Die aufgerufene Webseite oder Bestandteile davon
+* Die Webseite, von der Sie uns besuchen
+* Der Zugriffszeitpunkt
+* Diese Daten werden auf Basis von Art. 6 Abs. 1 lit. f DSGVO erhoben und der Zweck der Datenerhebung ist der Betrieb, die Wartung und der Schutz unserer Systeme. Die Daten fallen standardmäßig bei jedem Verbindungsaufbau mit einem Webserver im Internet an. Tritt ein Fehler bei der Nutzung unserer Webseite auf oder vermuten wir einen Angriff oder eine missbräuchliche Nutzung, dienen diese Daten dazu das eingetretene Szenario zu reproduzieren. Ihre IP-Adresse wird zusätzlich dazu genutzt  [...]
+
+## Cookies und Analyse-Tool
+Auf dieser Website werden unter Einsatz der Webanalysedienst-Software Matomo (www.matomo.org), einem Dienst des Anbieters InnoCraft Ltd., 150 Willis St, 6011 Wellington, Neuseeland, („Mataomo“) auf Basis unseres berechtigten Interesses an der statistischen Analyse des Nutzerverhaltens zu Optimierungszwecken unserer Webseite gemäß Art. 6 Abs. 1 lit. f DSGVO Daten gesammelt und gespeichert. Aus diesen Daten können zum selben Zweck pseudonymisierte Nutzungsprofile erstellt und ausgewertet w [...]
+
+## Ihre Rechte
+Als Betroffener einer Datenerfassung stehen Ihnen umfassende Rechte zu (Art. 15-23 DSGVO):
+
+* Das Recht jederzeit Auskunft über Ihre bei uns gespeicherten Daten zu erhalten
+* Das Recht die Einschränkung der Verarbeitung Ihrer Daten zu fordern oder der Verarbeitung zu widersprechen
+* Das Recht die Sperrung oder Löschung Ihrer Daten zu verlangen
+* Das Recht die Ihre Daten berichtigen zu lassen
+* Das Recht Ihre bei uns gespeicherten Daten in einem maschinenlesbaren Format zu erhalten („Datenübertragbarkeit“)
+* Möchten Sie Ihre Rechte geltend machen, schicken Sie uns einfach eine E-Mail an die oben genannte Kontaktadresse (datenschutz@fzi.de). Zusätzlich haben Sie natürlich jederzeit das Recht sich, bei Verdacht auf einen Verstoß gegen das Datenschutzrecht, bei der zuständigen Aufsichtsbehörde zu melden.
\ No newline at end of file
diff --git a/documentation/docs/user-guide-first-steps.md b/documentation/docs/user-guide-first-steps.md
new file mode 100644
index 0000000..8567ee0
--- /dev/null
+++ b/documentation/docs/user-guide-first-steps.md
@@ -0,0 +1,214 @@
+---
+id: user-guide-first-steps
+title: First steps
+sidebar_label: First steps
+---
+
+In this section, you'll learn how to create and run pipelines in StreamPipes.
+Before starting with this guide, all steps from the installation guide must be finished successfully and StreamPipes must be up and running.
+
+This guide consists of three steps:
+First, you'll learn how to create your first pipeline using the interactive tutorial.
+Afterwards, this section contains two examples on how to create pipelines based on the built-in data simulator.
+ 
+## Interactive Tutorial
+The easiest way to get started with StreamPipes is the interactive tutorial. Once you open the pipeline editor for the first time, you'll see a splash screen as shown below.
+Click **Start tour** to start the interactive tutorial, which will guide you through your first steps with StreamPipes.
+ 
+![Interactive Tutorial](/img/quickstart/interactive-tutorial.png)  
+
+If the splash screen does not show up, make sure you've installed the following pipeline elements:
+* Flow rate (data stream)
+* Numerical Filter (processing element)
+* Dashboard (data sink)
+
+After you've finished this very first tour, try the following tours to learn more about other features of StreamPipes:
+* Open the Live Dashboard and start the dashboard tour (by clicking the school icon in the top menu bar) to learn how to create real-time visualizations
+* Open StreamPipes Connect and start the tour to learn how to connect new data sources with StreamPipes.
+ 
+## First Examples
+
+In this tutorial, you will create two simple pipelines that demonstrate the basic functionality of StreamPipes.
+The first example deals with monitoring a flow rate sensor.
+This pipeline ensures that everything works properly and data is sent through the whole system.
+In the second example we model a more complex situation detection pipeline that triggers a notification.
+Let's start with our first pipeline.
+If you have problems with any of the examples, please send us an email.
+We are happy to help you.
+
+## Data Simulation
+All the data sources that we have integrated in the StreamPipes demonstrator are simulated according to real world sensors.
+For example a flow rate sensor in a water pipe, which measures how much water flows in that pipe or a water level sensor in a water tank, that reports how high the water level is in the tank.
+In the next section you will use the flow rate sensor to build your first pipeline.
+
+## Flow Rate Visualization
+In the first example, we create a live line chart to monitor the value of the mass flow from a flow rate sensor.
+
+### Create Pipeline
+* As a first step go to the pipeline editor
+* Select the **FRS1** (Flow Rate Source 1) source
+* Then navigate to the **Data Sink** tab
+* Select the **DS** (Dashboard Sink) and connect the source with the sink
+* After connecting the elements save the pipeline by clicking on the save button on the top left corner
+* In the save menu add a name *Flow Rate Monitoring* and a description *This is my very first pipeline!*
+* Also select the **Start pipeline immediately** checkbox
+* Then click the button **Save and go to pipeline view**
+* You are navigated to the pipeline view and a confirmation that the pipeline was started successfully should be shown
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/01_PipelineEditor_DataStreams.png" alt="Show Pipeline Editor Streams">
+    <img src="/img/quickstart/examples_master/02_example1_source.png" alt="Sources">
+    <img src="/img/quickstart/examples_master/03_example1_pipeline_finished.png" alt="Pipeline Finished">
+    <img src="/img/quickstart/examples_master/04_example1_save.png" alt="Saved Pipeline">
+    <img src="/img/quickstart/examples_master/05_example1_pipeline_started.png" alt="Pipeline 01 Started">
+</div>
+
+### Create Visualization
+* After we have created the pipeline we must create the line chart
+* Navigate to the **Live Dashboard** of StreamPipes
+* Click on the **Add visualization** button
+* Select the just created pipeline **Flow Rate Monitoring** and click the **next** button
+* For the visualization select the **Line Chart** and click **next** again
+* Now you have to enter the configuration for the line chart
+    * Select time mapping: **timestamp**
+    * Select number mapping: **mass_flow**
+    * Range Minimum: **0** 
+    * Range Maximum: **10**
+* When all parameters are set correctly click the next button again.
+* Congratulations, you have created your first pipeline and should now see the line chart
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/06_example01_live_visualisation.png" alt="Visualize Pipeline">
+    <img src="/img/quickstart/examples_master/07_example01_first_step.png" alt="Configure Visualization Step 1">
+    <img src="/img/quickstart/examples_master/08_example01_second_step.png" alt="Configure Visualization Step 2">
+    <img src="/img/quickstart/examples_master/09_example01_third_step.png" alt="Configure Visualization Step 3">
+    <img src="/img/quickstart/examples_master/10_example1_finished.png" alt="Pipeline 01 Done">
+</div>
+
+
+## Condition monitoring of a water tank
+In our second example we are going to create a more complex pipeline.
+This pipeline has two sources, the flow rate sensor from the previous example and a source measuring the water level in a tank.
+Our goal is to send a notification when the flow rate stops and the water level in the water tank sinks too fast.
+In this case a service technician should check the system as soon as possible.
+This example should just illustrate how the individual components work.
+Since the system currently uses simulated data each time the situation occurs a notification is triggered.
+
+Now let's start!
+
+
+### Build the pipeline
+* First we have to select the **FRS1** (Flow Rate Sensor 1) and **WL1** (Water Level) from the sources tab
+* In a first step we want to detect when the flow rate stops
+* Use the **NF** (Numerical Filter) from the processing elements tab and connect it to the **FRS1** source
+* Configure the **Numerical Filter**:
+    * Field name for filter operator: **mass_flow**
+    * Filter Operation: **<**
+    * Threshold value: **1**
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/11_example2_start.png" alt="Start Example 2">
+    <img src="/img/quickstart/examples_master/12_example2_numerical.png" alt="Select Numerical Filter">
+    <img src="/img/quickstart/examples_master/13_example2_configure_numerical.png" alt="Configure Numerical Filter">
+</div>
+
+* As a next step we add an aggregation to the water level. This reduces the inaccuracies we have because the water moves in the tank. 
+* Select the **A** (Aggregation) processing element
+* Connect the **WL1** with **A**
+* Configure **Aggregation**:
+    * Property Selection: **level**
+    * Operation: **Average**
+    * Time Window Size: **10**
+    * Output Frequency: **1**
+    * Group By: **None**
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/14_example2_aggregate.png" alt="Select Aggregation">
+    <img src="/img/quickstart/examples_master/15_example2_configure_aggregate.png" alt="Configure Aggregation">
+        <img src="/img/quickstart/examples_master/15_example2_configure_aggregate_2.png" alt="Configure Aggregation">
+
+</div>
+
+* With the next processing element we check if the water level decreases too fast
+* Select **I** (Trend) and connect it to **A**
+* Configure **Trend**:
+    * Timestamp field: **timestamp**
+    * Value to observe: **aggregatedValue** (This value is created by previous element)
+    * Group by: **sensorId**
+    * Increase / Decrease: **Decrease**
+    * Percentage of Increase / Decrease: **20**
+    * Time Window Length: **10**
+    * Select Output: **All**
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/16_example2_increase.png" alt="Select Increase">
+    <img src="/img/quickstart/examples_master/17_example2_configure1_increase.png" alt="Configure 01 Increase">
+</div>
+
+* Now we connect the two streams with the sequence element, which checks if both events occur within a certain time window
+* Select **S** (Sequence) and connect both data streams to it
+* Configure **Sequence**:
+    * Time Window Size: **1**
+    * Time Unit: **sec**
+    
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/19_example2_sequence.png" alt="Select Sequence Detection">
+    <img src="/img/quickstart/examples_master/20_example2_configure_sequence.png" alt="ConfigureSequence Detection">
+</div>
+
+* Now we create a notification for the service technician that something is wrong with the system
+* Select **N** (Notification) from the data sink tab
+* Connect **S**  with **N**
+* Configure **Notification**: 
+    * Notification title: **Alarm**
+    * Content: **One notification was triggered by our first complex pipeline. Yeahhhh!**
+    
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/21_example2_notification.png" alt="Select Notification">
+    <img src="/img/quickstart/examples_master/22_example2_configure_notification.png" alt="Configure Notification">
+</div>
+
+* Add the dashboard sink to the increase element to monitor the preliminary results
+* Select **DS** and connect to **I**
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/23_example2_dashboard_sink.png" alt="Select Dashboard">
+</div>
+
+* Save the pipeline 
+* Save configuration:
+    * Pipeline Name: **Second Pipeline**
+    * Description: **Complex monitoring rule**
+    * Start pipeline immediately: **CHECK**
+* Click **Save and go to pipeline view**
+ 
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/24_example2_save.png" alt="Save Pipeline">
+</div>
+
+* All pipeline elements should be started successfully 
+* It can be seen that the elements run on different technologies, in flink and a java container
+    * http://pe-flink-examples:8090
+    * http://pe-jvm-examples:8090
+* Go to visualization and create **Raw Data** visualization for the new pipeline   
+    
+<div class="my-carousel">
+    <img src="/img/quickstart/examples_master/25_example2_started.png" alt="Pipeline Started Successfully">
+    <img src="/img/quickstart/examples/26_example2_visualisation.png" alt="Add Visualization">
+</div>
+
+* Every time you can see output in the **Raw Data** visualization of the new pipeline and the **Line Chart** from the first example are zero, a Notification is triggered by the pipeline.
+* Go to the **Notifications** view and have a look at the notification
+
+<div class="my-carousel">
+    <img src="/img/quickstart/examples/27_example2_notification.png" alt="Show Notification">
+</div>
+
+
+Congratulations, you finished the quick start!
+It is recommended to stop the last pipeline, because it will keep creating notifications ;)
+
+We hope we gave you an easy quick start into StreamPipes.
+If you have any questions or suggestions, just send us an email.
+From here on you can explore all features in the [User Guide](user-guide-introduction.md) or go to the [Developer Guide](dev-guide-introduction.md) to learn how to write your own StreamPipes processing elements.
+
diff --git a/documentation/docs/user-guide-installation.md b/documentation/docs/user-guide-installation.md
new file mode 100644
index 0000000..b9f406f
--- /dev/null
+++ b/documentation/docs/user-guide-installation.md
@@ -0,0 +1,140 @@
+---
+id: user-guide-installation
+title: Installation
+sidebar_label: Installation
+---
+## Prerequisites
+
+### Hardware
+
+* The **lite version** (if you simply want to run StreamPipes on a laptop for testing purposes) requires at least 8GB RAM, the **full version** requires at least 16 GB RAM.
+* Docker (latest version, see instructions below)
+* Docker Compose (latest version, see instructions below)
+
+### Supported operating systems
+We rely on Docker and support three operating systems for the StreamPipes system
+
+* Linux
+* OSX
+* Windows 10
+    * Please note that older Windows versions are not compatible with Docker. Also Linux VMs under Windows might not work, due to network problems with docker.
+
+### Web Browser
+StreamPipes is a modern web application, therefore you need a recent version of Chrome (recommended), Firefox or Edge.
+
+### Docker
+You need to have Docker installed on your system before you continue with the installation guide.
+
+
+<div class="admonition info">
+<div class="admonition-title">Install Docker</div>
+<p>Go to https://docs.docker.com/installation/ and follow the instructions to install Docker for your OS. Make sure docker can be started as a non-root user (described in the installation manual, don’t forget to log out and in again) and check that Docker is installed correctly by executing <code>docker run hello-world</code></p>
+</div>
+
+<div class="admonition info">
+<div class="admonition-title">Configure Docker</div>
+<p>By default, Docker uses only a limited number of CPU cores and memory.
+       If you run StreamPipes on Windows or on a Mac you need to adjust the default settings.
+       To do that, click on the Docker icon in your tab bar and open the preferences.
+       Go to the advanced preferences and set the **number of CPUs to 6** (recommended) and the **Memory to 8GB**.
+       After changing the settings, Docker needs to be restarted.</p></div>
+
+
+## Install StreamPipes
+
+<div class="tab-content" id="myTabContent">
+    <div class="tab-pane fade show active" id="linux" role="tabpanel" aria-labelledby="linux-tab">
+        <ul style="padding-left:0">
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">1</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                        <a href="https://github.com/streampipes/streampipes-installer/archive/master.zip">Download</a>
+                        the latest StreamPipes release from Github and extract the zip file to a directory of your choice.
+                    </div>
+                </div>
+            </li>
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">2</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                       <div style="margin-bottom:5px;"><b>Linux/Mac:</b> In a command prompt, open the folder <code>osx_linux</code> and run <code>./streampipes
+                            start</code>.<br/>
+                        </div>
+                        <div style="margin-top:5px;">
+                        <b>Windows 10:</b> In a command prompt, open the folder <code>windows10</code> and run <code>streampipes.bat
+                                                    start</code>.<br/>
+                        </div>
+                    </div>
+                </div>
+            </li>
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">3</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                       When asked, enter the version (full or lite) and the IP address or hostname of your server (called SERVER_IP in the next step).
+                    </div>
+                </div>
+            </li>
+            <li class="installation-step">
+                <div class="wrapper-container" style="align-items: center;justify-content: center;">
+                    <div class="wrapper-step">
+                        <span class="fa-stack fa-2x">
+                             <i class="fas fa-circle fa-stack-2x sp-color-green"></i>
+                             <strong class="fa-stack-1x" style="color:white;">4</strong>
+                        </span>
+                    </div>
+                    <div class="wrapper-instruction">
+                        Open your browser, navigate to http://SERVER_IP:80 and finish the setup according to the instructions below.
+                    </div>
+                </div>
+            </li>
+        </ul>
+        </div>
+    </div>
+
+## Setup StreamPipes
+
+Once you've opened the browser at the URL given above, you should see the StreamPipes application as shown below.
+To set up the system, enter an email address and a password and click on install.
+At this point, it is not necessary to change anything in the advanced settings menu.
+The installation might take some time, continue by clicking on "Go to login page", once all components are successfully configured.
+
+
+On the login page, enter your credentials, then you should be forwarded to the home page.
+
+Congratulations! You've successfully managed to install StreamPipes. Now we're ready to build our first pipeline!
+
+<div class="my-carousel">
+    <img src="/img/quickstart/setup/01_register_user.png" alt="Set Up User">
+    <img src="/img/quickstart/setup/02_user_set_up.png" alt="SetUp StreamPipes Components">
+    <img src="/img/quickstart/setup/03_login.png" alt="Go to login page">
+    <img src="/img/quickstart/setup/04_home.png" alt="Home page">
+</div>
+
+<div class="admonition error">
+<div class="admonition-title">Errors during the installation process</div>
+<p>In most cases, errors during the installation are due to a wrongly configured IP address or an under-powered system.<br/>
+If there is a problem with any of the components, please restart the whole system and delete the "config" directory on the server.
+   This directory is in the same folder as the docker-compose.yml file.<br/>
+   Please also make sure that your system meets the hardware requirements as mentioned in the first section of the installation guide.</p>
+</div>
+
+## Next Steps
+
+Now you can continue with the tutorial on page [First steps](user-guide-first-steps.md).
diff --git a/documentation/docs/user-guide-introduction.md b/documentation/docs/user-guide-introduction.md
new file mode 100644
index 0000000..2674e9a
--- /dev/null
+++ b/documentation/docs/user-guide-introduction.md
@@ -0,0 +1,61 @@
+---
+id: user-guide-introduction
+title: Introduction
+sidebar_label: Introduction
+---
+
+StreamPipes is a framework that enables users to work with data streams.
+It uses a lot of different technologies, especially from the fields of big data, distributed computing and the semantic web.
+One of the core concepts of StreamPipes is to add a higher semantic layer on top of big data processing technologies to ease their usage.
+StreamPipes is not just a UI, it is a framework with a lot of different capabilities, like modelling new data processing pipelines and executing them in a distributed environment.
+On top it uses semantics to provide guidance to non-technical people for better analyzing their data streams in a self-service manner.
+
+
+
+## Pipelines
+The core concept of StreamPipes are data processing pipelines.
+Those pipelines use data from different sources (Data Streams), then transform it via Processing Elements and store them in an database or send it to third party systems (Data Sinks).
+A brief introduction is given in the following sections.
+On the next page a detailed tour through StreamPipes explains all the different features that are available.
+
+
+## Data Streams
+Data Streams represent the primary source for data in StreamPipes.
+A stream is an ordered sequence of events, where an event is described as one or more observation values.
+Those events can come from different sources like sensors, machines, log files or many more.
+It does not matter what kind of serialization format the events have or which kind of transportation protocol the individual data streams use.
+As long as a semantic description is provided StreamPipes is capable of processing the data.
+
+
+## Processing Elements
+Processing Elements are defined as processors that transform one or more input event streams into an output event stream. 
+Those transformations can be rather simple like filtering out events based on a predefined rule or more complex by applying algorithms on the data.  
+Processing elements define stream requirements that are a set of minimum properties an incoming event stream must provide. 
+Furthermore, Processing Elements describe their output based on a set of output strategies.
+They also describe further (human) input in form of configuration parameters.
+The Processing Elements can be implemented in multiple technologies.
+This information is not necessary when constructing a pipeline, the user does not need to know where and how the actual algorithm is deployed and executed.
+During the modelling phase it is possible to set configuration parameters, which are then injected into the program when it is started.
+A description is provided for all parameters and it is ensured by the system that the user can just enter semantically correct values.
+
+
+## Data Sinks
+Data Sinks consume event streams similar to processing elements with the difference that sinks do not provide an output stream, i.e., they are defined as sinks that perform some action or trigger a visualization as a result of a stream transformation.
+The sinks also define stream requirements that must be fulfilled.
+In a pipeline it is not necessary to use a processing element to transform data.
+Often it can make sense to just use a data sink and connect it directly to the sensor to store the raw data into a data store for offline analysis.
+This is very simple with StreamPipes and no additional code must be written to create such a data lake.
+
+
+## Target Audience
+StreamPipes focuses on multiple target groups.
+This guide is for users who interact with the graphical user interface in the browser.
+If you are interested in the technical details or plan to extend the system with new algorithms, please read the Developer Guide.
+The graphical user interface is designed for domain experts who want to analyze data, but are not interested in technical details and do not want to write code.
+The SDK can be used by software developers to extend the framework with new functionality.
+After importing newly developed pipeline elements, they are available to all users of StreamPipes.
+
+
+## Next Steps
+To test StreamPipes on your local environment go to the [installation guide](user-guide-installation.md).
+If you are further interested in the concepts of StreamPipes continue with the [tour](user-guide-tour.md).
diff --git a/documentation/docs/user-guide-software-components.md b/documentation/docs/user-guide-software-components.md
new file mode 100644
index 0000000..fc92dfc
--- /dev/null
+++ b/documentation/docs/user-guide-software-components.md
@@ -0,0 +1,334 @@
+---
+id: user-guide-software-components
+title: Software Components
+sidebar_label: Software Components
+---
+
+This page contains all the software components that can be used within the StreamPipes framework.
+Some of them are mandatory, while others are only necessary for special capabilities.
+In the [Installation Guide](user-guide-installation.md#installation_1) we  already provide a docker-compose.yml file with all the necessary components
+for a minimal setup.
+Extend this configuration files with further containers described on this page and configure StreamPipes
+according to your needs.
+
+
+## StreamPipes Framework
+
+<details class="tip">
+<summary>StreamPipes Backend</summary>
+
+#### Description
+The StreamPipes Backend is the main component of the StreamPipes Framework. It contains the application logic to create and execute pipelines.
+Furthermore, it provides a REST-API that is used by other components for communication.
+
+#### Docker Compose
+```yaml
+backend:
+  image: streampipes/backend
+  depends_on:
+    - "consul"
+  ports:
+    - "8030:8030"
+  volumes:
+    - ./config:/root/.streampipes
+    - ./config/aduna:/root/.aduna
+  networks:
+    spnet:
+```
+</details>
+
+
+<details class="tip">
+<summary>StreamPipes UI</summary>
+
+#### Description
+This service uses nginx and contains the UI of StreamPipes.
+The UI can, for example, be used to import new pipeline elements, create new pipelines and manage the pipeline
+execution. The UI communicates with the backend via the REST interface.
+
+#### Docker Compose
+```yaml
+nginx:
+  image: streampipes/ui
+  ports:
+    - "80:80"
+  depends_on:
+    - backend
+  networks:
+    spnet:
+```
+</details>
+
+## StreamPipes Services
+
+<details class="tip">
+<summary>Consul</summary>
+#### Description
+Consul is used to store configuration parameters of the backend service and processing elements.
+It is further used for service discovery. Once a processing element container is started in the network, it is
+automatically discovered via the service discovery feature of Consul.
+
+#### Docker Compose
+```yaml
+consul:
+    image: consul
+    environment:
+      - "CONSUL_LOCAL_CONFIG={\"disable_update_check\": true}"
+      - "CONSUL_BIND_INTERFACE=eth0"
+      - "CONSUL_HTTP_ADDR=0.0.0.0"
+    entrypoint:
+      - consul
+      - agent
+      - -server
+      - -bootstrap-expect=1
+      - -data-dir=/consul/data
+      - -node=consul-one
+      - -bind={{ GetInterfaceIP "eth0" }}
+      - -client=0.0.0.0
+      - -enable-script-checks=true
+      - -ui
+    volumes:
+      - ./config/consul:/consul/data
+    ports:
+      - "8500:8500"
+      - "8600:8600"
+    networks:
+      spnet:
+        ipv4_address: 172.30.0.9
+```
+</details>
+
+<details class="tip">
+<summary>Zookeeper</summary>
+#### Description
+Apache Kafka and Apache Flink require zookeeper to manage their clusters.
+
+#### Docker Compose
+```yaml
+zookeeper:
+    image: wurstmeister/zookeeper
+    ports:
+      - "2181:2181"
+    networks:
+      spnet:
+```
+</details>
+
+<details class="tip">
+<summary>Kafka</summary>
+
+#### Description
+Kafka is used as the primary message broker. It is possible to use other brokers or even multiple message brokers in a single pipeline, but Kafka is the
+default. The communication between the processing elements in a pipeline is mostly done via Kafka.
+
+#### Docker Compose
+```yaml
+  kafka:
+    image: wurstmeister/kafka:0.10.0.1
+    ports:
+      - "9092:9092"
+    environment:
+      KAFKA_ADVERTISED_HOST_NAME: ###TODO ADD HOSTNAME HERE ###
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    networks:
+      spnet:
+```
+</details>
+
+<details class="tip">
+<summary>ActiveMQ</summary>
+#### Description
+ActiveMQ is another message broker which can be used in addition to Kafka. Currently, the main purpose is to provide
+an endpoint for the websocket connections required by the real-time dashboard of the StreamPipes UI.
+
+#### Docker Compose
+```yaml
+activemq:
+  image: streampipes/activemq
+  ports:
+    - "61616:61616"
+    - "61614:61614"
+    - "8161:8161"
+  networks:
+    spnet:
+```
+</details>
+
+<details class="tip">
+<summary>CouchDB</summary>
+
+#### Description
+CouchDB is the main database for StreamPipes data that needs to be persisted such as pipelines, users and visualizations created in the dashboard.
+
+#### Docker Compose
+```yaml
+couchdb:
+  image: couchdb
+  ports:
+    - "5984:5984"
+  volumes:
+    - ./config/couchdb/data:/usr/local/var/lib/couchdb
+  networks:
+    spnet:
+```
+</details>
+
+<details class="tip">
+<summary>Flink</summary>
+#### Description
+This service sets up a sample flink cluster with one jobmanager and one taskmanager. Although this cluster can be used for testing, it is not recommended for production use.
+
+#### Docker Compose
+```yaml
+jobmanager:
+  image: streampipes/flink
+  ports:
+    - "8081:8099"
+  command: jobmanager
+  networks:
+    spnet:
+
+
+taskmanager:
+  image: ipe-wim-gitlab.fzi.de:5000/streampipes/services/flink
+  command: taskmanager
+  environment:
+    - FLINK_NUM_SLOTS=20
+  networks:
+    spnet:
+```
+</details>
+
+
+## Processing Elements
+
+<details class="tip">
+<summary>PE Examples Sources</summary>
+#### Description
+This Processing Element Container contains several sample data sources that can be used to work with StreamPipes.
+It consists of sources descriptions and data simulators that constantly produce data.
+
+#### Docker Compose
+```yaml
+    pe-examples-sources:
+      image: streampipes/pe-examples-sources
+      depends_on:
+        - "consul"
+      ports:
+        - "8098:8090"
+      networks:
+        spnet:
+```
+</details>
+
+<details class="tip">
+<summary>PE Examples JVM</summary>
+
+#### Description
+This Processing Element Container contains some sink example implementations, like for example the real-time
+dashboard. This can be used to visualize data within StreamPipes.
+
+#### Docker Compose
+```yaml
+      pe-examples-jvm:
+        image: streampipes/pe-examples-jvm
+        depends_on:
+          - "consul"
+        environment:
+          - STREAMPIPES_HOST=###TODO ADD HOSTNAME HERE ###
+        ports:
+          - "8096:8090"
+        networks:
+          spnet:
+```
+</details>
+
+<details class="tip">
+<summary>PE Examples Flink</summary>
+
+#### Description
+The Flink Samples Processing Element Container contains some example algorithms that can be used within processing
+pipelines in the pipeline editor. Those algorithms are deployed to a Flink cluster once the pipeline is started.
+
+#### Docker Compose
+```yaml
+  pe-flink-samples:
+    image: streampipes/pe-examples-flink
+    depends_on:
+      - "consul"
+    ports:
+      - "8094:8090"
+    volumes:
+      - ./config:/root/.streampipes
+    networks:
+      spnet:
+```
+</details>
+
+### Third Party Services
+
+<details class="tip">
+<summary>Elasticsearch</summary>
+
+#### Description
+This service can be used to run Elasticsearch. Data can be written into Elasticsearch with the Elasticsearch
+sink of the PE Flink samples container.
+
+#### Docker Compose
+```yaml
+elasticsearch:
+  image: ipe-wim-gitlab.fzi.de:5000/streampipes/services/elasticsearch
+  ports:
+    - "9200:9200"
+    - "9300:9300"
+  volumes:
+    - ./config/elasticsearch/data:/usr/share/elasticsearch/data
+  networks:
+    spnet:
+```
+</details>
+
+<details class="tip">
+<summary>Kibana</summary>
+#### Description
+Kibana is used to visualize data that is written into Elasticsearch. It can be used in addition to our live dashboard
+to analyse and visualize historic data.
+
+#### Docker Compose
+```yaml
+kibana:
+  image: kibana:5.2.2
+  ports:
+    - "5601:5601"
+  volumes:
+    - ./config/kibana/kibana.yml:/opt/kibana/config/kibana.yml
+  environment:
+    - ELASTICSEARCH_URL=http://elasticsearch:9200
+  networks:
+    spnet:
+```
+</details>
+
+
+<details class="tip">
+<summary>Kafka Web Console</summary>
+
+#### Description
+The kafka web console can be used to monitor the kafka cluster. This is a good tool for debugging your newly
+developed pipeline elements.
+
+#### Docker Compose
+```yaml
+kafka-web-console:
+  image: hwestphal/kafka-web-console
+  ports:
+    - "9000:9000"
+  volumes:
+    - ./config:/data
+  networks:
+    spnet:
+```
+</details>
diff --git a/documentation/docs/user-guide-tour.md b/documentation/docs/user-guide-tour.md
new file mode 100644
index 0000000..ec345b8
--- /dev/null
+++ b/documentation/docs/user-guide-tour.md
@@ -0,0 +1,279 @@
+---
+id: user-guide-tour
+title: Tour
+sidebar_label: Tour
+---
+
+StreamPipes is a framework that enables domain experts to model and execute stream processing pipelines in a big data infrastructure.
+The graphical user interface of StreamPipes is a web application that provides an easy to use solution for domain experts.
+In this page, an overview of the many features of StreamPipes is given. We will tour through all features and explain what they do and how users can interact with them.
+
+First of all, a user needs to log in to the system.
+The credentials for the user are specified during the installation process.
+
+![StreamPipes Login](/img/features_0_62_0/login.png) 
+
+## Home
+After logging in, the user is redirected on to the home page.
+The home page gives an overview over the different features available in the StreamPipes UI.
+
+On the left, the navigation menu can be seen.
+You can either use the icons on the left side or click on the menu icon on the
+top left to open the details view of the navigation menu.
+
+On the top right, a link refers to the documentation and the logout button is present.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/home/home.png" alt="Home">
+    <img src="/img/features_0_62_0/home/open_menu.png" alt="Open Menu">
+</div>
+
+
+
+## Pipeline Editor
+The first feature we are going to explain is the Pipeline Editor.
+This is one of the central features of StreamPipes, since graphical modeling of pipelines takes place in this view.
+
+On the top we can see four tabs: __Data Sets__, __Data Streams__, __Processing Elements__, and __Data Sinks__.
+Each tab contains multiple pipeline elements, which can be installed at runtime.
+The installation of pipeline elements is explained later in section [Install Pipeline Elements](user-guide-tour.md#install-pipeline-elements).
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/editor/2_PipelineEditor_DataStreams.png" alt="Data Streams">
+    <img src="/img/features_0_62_0/editor/3_PipelineEditor_DataProcessors.png" alt="Processing Elements">
+    <img src="/img/features_0_62_0/editor/4_PipelineEditor_DataSinks.png" alt="Data Sinks">
+    <img src="/img/features_0_62_0/editor/1_PipelineEditor_DataSets.png" alt="Data Sets">
+</div>
+
+There are multiple ways to search for a pipeline element. 
+The easiest way is to enter a search term in the search field on the top left corner.
+The system filters the elements according to the search term and only presents the relevant ones.
+Another way is to select one of the categories from the drop down menu.
+The system then filters the elements according to the selected category.
+The category of an element is defined by its functionality.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/editor/5_text_filter.png" alt="Text Filter">
+    <img src="/img/features_0_62_0/editor/6_category_drop_down.png" alt="Category Drop Down">
+    <img src="/img/features_0_62_0/editor/7_category_filter.png" alt="Category Filter">
+</div>
+
+Modelling of a pipeline starts with choosing one or more data sources. Therefore a data stream or data set must be selected
+and moved into the editor via drag and drop.
+After adding a data stream, we can select a processing element to transform the events of the data stream.
+This is done again by dragging the processing element icon into our editor.
+The mouse can be used to connect the two elements.
+It is automatically checked in the background if the two elements are semantically compatible.
+If this check is successful, a configuration menu is shown. It contains the parameters that can be modified by the
+user. For all parameters, a description is provided and the system also prevents the user from entering parameters that
+are not correct, according to the semantic description of the element.
+It is also ensured that all required parameters are provided by the user, otherwise an error message is displayed.
+
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/editor/8_connect_elements.png" alt="Connect Elements">
+    <img src="/img/features_0_62_0/editor/9_configure_element.png" alt="Configure Elements">
+</div>
+
+When the user tries to connect two elements that are not compatible, the system shows a __connection error__ message.
+The example illustrated below shows such a case. The user tried to connect a text filter to a light sensor.
+This is not possible since the text filter processing element requires at least one event property of type string, which is not provided by the light sensor.
+
+![Connection Error](/img/features_0_62_0/editor/10_connection_error.png) 
+
+To further improve the usability, multiple ways are available to connect new elements besides the drag and drop option.
+Each processing element has multiple short-cut buttons to connect it with another element.
+The first one (eye icon) can be used to get a suggestion of all elements that are compatible with the current element.
+The second one (plus icon) gives a recommendation on the elements the user might want to connect, based on the usage of the component in
+other pipelines.
+There is also a button (question tag icon) to open the documentation of a selected element.
+Elements can be deleted by clicking the 'delete' button.
+Each element can also be re-configured at a later point in time using the configuration button.
+There is one important aspect about re-configuration of pipeline elements you need to be aware of: Only elements that are not yet connected to another element can be modified.
+The reason for this behaviour is that some of the following elements might rely on the configuration of previous elements.
+This way it is ensured that the user can not change the behaviour of the pipeline by accident.
+ 
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/editor/11_shortcut_buttons.png" alt="Shortcut Buttons">
+    <img src="/img/features_0_62_0/editor/12_compatible_elements.png" alt="Compatible Elements">
+    <img src="/img/features_0_62_0/editor/13_recommend_elements.png" alt="Recommended elements">
+</div>
+
+
+After the pipeline is completely modelled, the editor looks similar to the first image below.
+Especially for larger pipelines, the auto layout button in the editor menu might be helpful.
+With this button, the pipeline will be beautifully aligned in the editor, helping users to get a better overview of the complete pipeline.
+On the top left corner of the editor, the 'save' button can be found.
+After the modelling of the pipeline is done, use this button to save and execute the pipeline.
+A save dialogue pops up when clicking the save button. 
+The pipeline title must be entered and an additional description can be provided.
+It is recommended to always provide a description, because it makes it easier for other users to understand the meaning of the pipeline.
+In the save menu, the user can either just store the pipeline configuration or store it and immediately start the pipeline.
+Once the pipeline is executed, the user is redirected to the _Manage Pipeline_ view.
+In this view, the user gets immediate feedback whether all components did start correctly and the pipeline is up and running.
+This view also shows that individual elements might run in different environments on different servers.
+If there is an error during the execution, a notification containing a (hopefully) detailed error description is provided in this view.
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/editor/14_save_adjust.png" alt="Save adjust delete">
+    <img src="/img/features_0_62_0/editor/15_save_dialogue.png" alt="Complete pipeline">
+    <img src="/img/features_0_62_0/editor/16_pipeline_sucessfully_started.png" alt="Pipeline successfully started">
+</div>
+
+
+## Manage Pipelines
+The _Manage Pipelines_ view provides an overview of all existing pipelines.
+Existing pipelines can be managed using this view.
+For instance, users can start and stop pipelines or delete them when they are no longer needed.
+Pipeline actions can be performed by clicking one of the buttons next to each pipeline.
+For each pipeline, the title and description is displayed in the table.
+By clicking the edit symbol an overview of the created pipeline is shown. In this window you are able to analyze statistics, identify errors or edit the pipeline.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/manage_pipelines/1_pipeline_overview.png" alt="Pipeline Overview">
+    <img src="/img/features_0_62_0/manage_pipelines/2_pipeline_information.png" alt="Pipeline information">
+    <img src="/img/features_0_62_0/manage_pipelines/3_delete_pipeline.png" alt="Delete Pipelines">
+    <img src="/img/features_0_62_0/manage_pipelines/4_pipeline_deleted.png" alt="Delete Pipelines">
+</div>
+
+In a setting with many defined pipelines, it can get really hard to keep track of all pipelines.
+This is why we introduce categories.
+A category is a set of pipelines that can be defined by users in order to better organize pipelines.
+By clicking on the "Manage Categories" button on the top left, a new category can be added to StreamPipes.
+In this example, we create a new category named "New Demo Category".
+After saving a category, pipelines can be added to the newly created category.
+The new category is then presented as a tab in the _Pipeline Management_ view.
+This tab contains all previously defined pipelines.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/manage_pipelines/5_pipeline_category.png" alt="Pipeline Category">
+    <img src="/img/features_0_62_0/manage_pipelines/6_add_category.png" alt="Add Pipeline Category">
+    <img src="/img/features_0_62_0/manage_pipelines/7_new_category.png" alt="New Category">
+    <img src="/img/features_0_62_0/manage_pipelines/8_show_new_category.png" alt="Show new Category">
+</div>
+
+## Live Dashboard
+The live dashboard can be used for multiple use cases.
+It is a good way to live monitor a running system in a dashboard, but it can also be used during the pipeline development to get a fast feedback from newly created pipelines.
+Below is a screenshot of an example dashboard showing the current value of pressure, which further shows a line chart and a traffic light for a water level pipeline.
+All pipelines that contain the "Dashboard Sink" can be visualized in this view.
+To add a new visualisation, click on the "Add visualisation" button on the top left corner.
+Afterwards, a three-step configuration menu is shown.
+The first step is to select the pipeline that should be visualized.
+In the second step, the type of visualization can be defined.
+Currently 10 different types are available, including line charts, various map visualizations and data tables.
+After selecting the type (in our example "Gauge"), you can select the specific measurement values of the data stream that should be displayed.
+In the example below, the water level value should be monitored and the gauge value should range from 0 to 100.
+Once all steps are completed, the new visualization is placed on the dashboard and live data is presented as soon as it becomes available.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/dashboard/01_dashboard.png" alt="Screenshot of live dashboard">
+    <img src="/img/features_0_62_0/dashboard/02_new_visualisation.png" alt="Select Pipeline to visualize">
+    <img src="/img/features_0_62_0/dashboard/03_select_gauge.png" alt="Select Visualization Type">
+    <img src="/img/features_0_62_0/dashboard/04_configure_gauge.png" alt="Configure Visualization">
+    <img src="/img/features_0_62_0/dashboard/05_new_gauge_done.png" alt="Show new Visualization">
+</div>
+
+## File Download
+With the file download, it is possible to download stored files directly from Elasticsearch.
+This can be very useful, for example, when a data dump is needed for a specific time range.
+All data that is written into Elasticsearch using the _Elasticsearch Sink_ can be accessed by the file downloader.
+A common use case is to download data for offline analysis and to train a machine learning algorithm.
+First, an index must be defined, afterwards, the time range must be set.
+A date picker helps users to enter the time range.
+When a user clicks the "Create File" button the file is created.
+All files stored on the server can be downloaded via the download button.
+If the files are no longer needed, they can be deleted by clicking the delete button.
+This will remove the file from the server.
+Since the data is stored in Elasticsearch anyway, it is recommended not to store the files on the server for a longer period of time.
+When a file is needed again at a later point in time it is easy to create it again. 
+This way a lot of disk space on the server can be saved, especially when the files are rather large.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/file_download/01_file_download.png" alt="File Download">
+    <img src="/img/features_0_62_0/file_download/02_file_download.png" alt="File Download with selected time range">
+    <img src="/img/features_0_62_0/file_download/03_file_download.png" alt="File Download with new File">
+</div>
+
+## Notifications
+Notifications are a good solution to notify someone when an urgent situation occurs.
+A notification can be created by using the notification sink in the pipeline editor.
+When using such a sink a configuration dialogue is presented to the user.
+In this dialogue, the user must provide enough information to resolve the situation when it occurs.
+The message can be nicely configured in the message editor.
+It is also possible to embed values of the event that triggered the notification.
+This can be done with the #property# notation.
+All available properties are presented in the notification configurator.
+When the notification is triggered the #property# template is replaced with the actual value of the property.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/notification/01_select_notification_sink.png" alt="Select Notification">
+    <img src="/img/features_0_62_0/notification/02_configure_notification.png" alt="Configure Notification">
+</div>
+
+A pop-up icon on the notification tab in the menu shows the user how many unread notifications are currently in the system.
+This icon also alerts users when new notifications occur.
+In the notification overview all notifications are listed.
+On the top are the new notifications that are not read yet.
+A user can mark them as read by clicking on the little envelope icon.
+Those notifications are then no longer in the unread section, but they remain in the "All Messages" view.
+This way it is possible to keep track of all notifications and have a look at them at a later point in time.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/notification/03_new_notification.png" alt="New Notification">
+</div>
+
+
+## Install Pipeline Elements
+StreamPipes is highly configurable and extensible.
+Pipeline elements can be added and removed during runtime.
+This can be done in the "Install Pipeline Elements" view.
+All available pipeline elements are shown here and can be selected to install or uninstall.
+It is also possible to select multiple or all of them and then install them all together.
+When a new element is installed by the user it is automatically available in the "Pipeline Editor" and can be used for pipelines.
+Elements that are uninstalled are removed from the system. 
+They can no longer be used within pipelines.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/install_elements/01_install_elements.png" alt="Install Elements View">
+    <img src="/img/features_0_62_0/install_elements/02_install_countAggregation.png" alt="Install new Element">
+    <img src="/img/features_0_62_0/install_elements/03_installed_countAggregation.png" alt="Installation Successful">
+    <img src="/img/features_0_62_0/install_elements/04_countAggregation_in_editor.png" alt="Show new installed element">
+</div>
+
+
+## My Elements
+The "My Elements" view gives a quick overview over all installed pipeline elements.
+Here they can be inspected and the description can be re-imported.
+In this view it is also possible to have a look at the JSON-LD representation of each element.
+This is not important for a user, but it is worth noting that the system uses this machine understandable format to support the user.
+For example all the information of the sources, like data schema or unit information is in the JSON-LD meta data.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/my_elements/01_my_elements.png" alt="My Elements Overview">
+    <img src="/img/features_0_62_0/my_elements/02_my_elements_jsonid.png" alt="View JSON-LD">
+</div>
+
+## Configuration
+The last feature is the _configuration view_.
+Here, it is possible to change the configuration parameters of installed components.
+All components containing processing elements automatically register in StreamPipes when they are started in the same network as the backend component.
+Once a container is registered it is represented in the configuration view.
+The green dot on the top left indicates that the container is running properly.
+When there is a problem with a container the green dot changes to red and the user knows there is a problem.
+To configure the parameters the top right arrow of a configuration box must be clicked.
+Then the configuration menu pops up. 
+Within there it is possible to change the parameters of a service.
+To make the changes persistent the "Update" button must be clicked.
+A user should keep in mind that sometimes it is necessary to restart a container when the parameters are changed.
+Sometimes it is also necessary to re-import the pipeline element description, either by uninstalling and re-installing them after the container restart or by reloading the description in the "My elements" view.
+
+<div class="my-carousel">
+    <img src="/img/features_0_62_0/configuration/01_configuration_overview.png" alt="Configuration Overview">
+    <img src="/img/features_0_62_0/configuration/02_change_config.png" alt="Change Configuration">
+</div>
+
+
+Congratulations! You've just finished your first tour of StreamPipes.
+Although there's still more to learn, we introduced most of the currently available features.
+On the next page, the different processing elements that come with the installation are explained.
\ No newline at end of file
diff --git a/documentation/website/blog/2018-06-14-welcome.md b/documentation/website/blog/2018-06-14-welcome.md
new file mode 100644
index 0000000..d50bcd9
--- /dev/null
+++ b/documentation/website/blog/2018-06-14-welcome.md
@@ -0,0 +1,11 @@
+---
+title: Welcome to StreamPipes!
+author: Dominik Riemer
+authorURL: http://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+
+Welcome to our new blog. StreamPipes is the new, easy-to-use self-service data analytics toolbox for the Industrial IoT. In the coming weeks, we'll publish a series of blog posts that explain some ideas of StreamPipes. You'll read about use cases, best practices as well as technical details.
+
+Stay tuned!
+
diff --git a/documentation/website/blog/2018-06-18-how-to-understand-your-machine-data.md b/documentation/website/blog/2018-06-18-how-to-understand-your-machine-data.md
new file mode 100644
index 0000000..14aa217
--- /dev/null
+++ b/documentation/website/blog/2018-06-18-how-to-understand-your-machine-data.md
@@ -0,0 +1,105 @@
+---
+title: How to understand your machine data?
+author: Philipp Zehnder
+authorURL: http://twitter.com/philipp10der
+authorImageURL: /img/zehnder.png
+---
+**<div style="float: left; padding-right: 40px;">8 minutes to read</div>**
+<br>
+
+
+Data is the new oil. Probably everybody of you has heard or read this sentence before.
+It is often stated how important data and data understanding is for companies today.
+But what does this actually mean? What does the data that is produced by your machines and systems look like?
+And how can we solve the big challenge to turn this raw data into insights that can be understood by humans?
+
+<!--truncate-->
+
+When it comes to data analytics people often talk about the big players like Google, Facebook, or Netflix which collect a lot of data about their users and their usage behavior.
+The core of their business model is to use data and turn it into profit. Nowadays all companies have the requirement to analyze their data.
+But since this was not originally part of their business model it is very challenging for them to catch up.
+When it comes to gaining insights from data, advanced analytics and machine learning are often mentioned. Before such techniques can be used, some work must be done.
+A first, basic part of that is to understand your data. We do that in four steps, as can be seen in figure below.
+First data sources must be accessed, then they can be integrated and for example be stored in a data lake.
+After that, we employ rule-based analytics to find patterns and situations.
+In the end, machine learning and advanced analytics algorithms can be used to get more out of the data.
+The idea behind those steps is to generate value as early as possible by collecting the low-hanging fruits first.
+We develop methodologies and tools for the individual steps to also allow domain experts to perform them to bridge the gap between the big tech companies and more traditional industries.
+
+<img class="blog-image" style="max-width:50%;" src="/blog/assets/2018-06-18/01_motivation.png" alt="Motivation Graphic">
+
+The questions we try to answer in these blog posts are:
+
+* **What does data produced by machines look like?**
+* **How can machines and other data sources be accessed?**
+* **How is it possible to process the continuously produced flow of data?**
+
+We illustrate our ideas with an example which we also use during the whole blog series.
+The example is a water circuit of a cooling system. You can see such a system in the next image.
+It contains multiple water tanks and pipes that connect those tanks.
+Within the tanks and the pipes are multiple sensors, which measure for example the water level in the tank, the flow rate in the water pipe, or the temperature of the water.
+All those sensors are very simple, they make iterative observations and send the sensed measurements to a computer system.
+
+<img class="blog-image" style="max-width:70%;" src="/blog/assets/2018-06-18/02_anlage.png" alt="Water Circle Image">
+
+Before we go into detail, we want to explain the term streaming data, because we focus on machine and sensor data which is continually produced in a streaming fashion.
+The opposite of streaming data are fixed sized data sets, called batch data.
+Since sensors constantly produce new data, it is not possible to wait until all data has been produced and then process it.
+Different to that is batch data.
+The advantage of batch data is, when the processing starts no new data is added anymore.
+Streaming data can also be processed with batch processing systems.
+Therefore, a data stream must be separated into fixed chunks which are then processed in a batch fashion.
+This is possible but the more natural fit is to use streaming systems since they were designed to process streaming data.
+
+In an environment where we process machine data we have different components.
+The individual components exchange information in the form of **events**.
+All assets continually producing data are called **data sources** (i.e. a sensor, machine or a software system).
+To access these data sources, **adapters** are needed.
+Adapters are software components (which can run anywhere, for example directly on the machine or separately on an edge device near the machine) that are able to connect to the data source, extract the sensor information and emit events to the **middleware**.
+The middleware is a central component which is an intermediate between the individual components.
+Even if the middleware is a central component, it is not a bottleneck and also not a single point of failure since it is often realized in form of a distributed system.
+**Processing systems** can access data from the middleware, transform it and apply algorithms.
+This is the component where the data is analyzed and higher-level information is generated.
+In the end, results can be stored in **databases**.
+From there, other applications like for example dashboards can access the calculated results.
+This is the high-level architecture of processing data streams.
+
+<img class="blog-image" src="/blog/assets/2018-06-18/03_architecture.png" alt="Overall architecture">
+
+Once the events are integrated into a middleware, it must be processed to detect situations and generate higher-level events.
+This can be done in an **Event Processing Network (EPN)**.
+An EPN consists of one or multiple **data sources, event processors** transforming and detecting patterns in the data and finally **data sinks**, which can be databases, alarms or other systems.
+Below such a pipeline is shown where we have two sensors as data sources: A flow rate sensor measuring the flow rate in a water pipe and a water level sensor in a water tank.
+It further contains three processing elements, each with a different functionality.
+The first one detects increases in the water tank, the second one filters out values under a defined threshold.
+The last of the processing elements detects when both situations occur within a defined time window.
+In the end, we employ a data sink, which triggers a notification for a service technician once the modeled situation occurs.
+We call such instances of EPNs processing **pipelines**.
+
+<img class="blog-image" src="/blog/assets/2018-06-18/04_pipeline.png" alt="Example Pipeline">
+
+The pipeline in the image before contains three different processing elements.
+Those elements contain the actual algorithms.
+Such algorithms can be as easy as simple filters on a sensor value or can also be more complex, such as patterns that occur over a period of time.
+But they can also contain machine learning and advanced analytics algorithms (e.g. neural networks) that perform predictions on data streams.
+Event processors are just an abstraction that take a data stream as an input and expose a “new” data stream.
+Furthermore, they contain configuration parameters which can be defined by a user.
+With this concept, we can create reusable components that can be used in many different pipelines.
+
+The goal of StreamPipes is to enable domain experts to do their data analysis themselves by providing tooling, where such processing pipelines can be modeled in a graphical user interface and then executed automatically without the need of an IT expert.
+Processing engines which are used as the basis of the processing elements in our solution often require a higher technological understanding.
+If this understanding is available, they can also be used on their own.
+We use such systems, like Apache Flink, Apache Spark or the brand-new KSQL by Confluent, for the processing but provide an abstraction over them to enable domain experts to model pipelines themselves.
+This has also the advantages that individual runtime solutions can be exchanged over time.
+Furthermore, we provide a semantic layer for all elements, including data sources, processing elements, and data sinks.
+With that layer, our system is capable of understanding the meaning and further support the user to ensure that just semantically correct pipelines are created.
+
+In this first blog post of our series, we gave a first introduction how data produced by machines and systems can be analyzed and value can be generated out of it.
+In the following blog posts we explain the individual parts in more detail to provide you with enough knowledge to start analyzing your own data.
+
+* **[Part 1: Overview](/blog/2018/06/18/how-to-understand-your-machine-data)**
+* Part 2: Data Sources
+* Part 3: Data Processing Elements
+* Part 4: Data Sinks
+* Part 5: Putting it all together (Example Pipeline)
+* Part 6: Practical Tips
diff --git a/documentation/website/blog/2018-09-17-preview-060.md b/documentation/website/blog/2018-09-17-preview-060.md
new file mode 100644
index 0000000..0e8b3b6
--- /dev/null
+++ b/documentation/website/blog/2018-09-17-preview-060.md
@@ -0,0 +1,67 @@
+---
+title: "Preview: StreamPipes release 0.60.0"
+author: Dominik Riemer
+authorURL: http://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+**<div style="float: left; padding-right: 40px;">5 minutes to read</div>**
+<br>
+
+It's been a while since our last blog post. However, in the background we are working hard to release the next version of StreamPipes which is currently in the final phase.
+In this post, we'd like to give you a preview of the upcoming StreamPipes version 0.60.0, which brings many new features and improvements.
+
+<!--truncate-->
+
+Our first public release is now about 6 months ago. In the meantime, we gathered lots of feedback we got from users concerned with the installation, available pipeline elements and usability.
+We learned what users expect from the tool and really tried to implement feature suggestions from the community.
+
+Here are the major things you'll find in the next version which will be released by the end of September:
+
+## Installation and first-time usage
+We aim to make it as easy as possible for first-time users to try and test StreamPipes. Although we have provided Docker images and a docker-compose file from the beginning, we've learned that our target group looks for an easier way to install the tool, without the need to manually configure things like ports or hostnames.
+For that reason, we have implemented an installer that is already on [Github](https://www.github.com/streampipes/streampipes-installer). With the next release, you'll find an even easier installer along with a dev-installer targeted at more advanced users that allows to configure the services and pipeline element containers you'd like to use in your StreamPipes instance.
+
+In addition, we wanted to provide better support for users that are trying to build a pipeline for the first time. Although we provide a tour and a user guide, we are aware that nobody likes reading documentations ;-)
+Therefore, the next release includes an interactive product tour that guides you through the first steps with StreamPipes and explains how to build pipelines - right in the pipeline editor!
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2018-09-17/streampipes-060-product-tour.png" alt="Product Tour">
+
+## StreamPipes Connect
+
+Although the SDK provides a developer-oriented way to connect new data sources, many users have asked us to provide an easier way to connect sources and data streams to StreamPipes.
+The next release includes a beta release of StreamPipes Connect. Connect is a major effort that integrates work we've done over the past year and provides an easy way to integrate your data streams right from the UI.
+A data marketplace contains a list of pre-defined adapters (which can be rather generic by selecting a protocol and a data format or quite specific as, e.g., an openSenseMap source).
+These adapters can be configured according to your needs and directly started on a set of worker nodes.
+
+StreamPipes Connect also lays the foundation for some cool new features that make it even easier to connect sources and will be added over the next months.
+In addition, pre-processing features will be integrated that will support edge computing use cases.
+
+Be aware that StreamPipes Connect will be released in a rather early version, so don't use it for production yet.
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2018-09-17/streampipes-060.png" alt="StreamPipes Connect">
+
+## Pipeline Elements
+
+Pipeline elements (data processors and sinks) are the part of StreamPipes that generate added-value.
+While the first release contained a set of around 10 pipeline elements, the next release will provide many new elements that are already integrated.
+New elements will include pattern detection algorithms, geospatial operators, image processing algorithms (e.g., a generic classifier) and even a Slack sink!
+
+In total, the next version includes more than 40 pipeline elements and we will add many more during the next months.
+The new pipeline elements are already available on [Github](https://www.github.com/streampipes/streampipes-pipeline-elements) and will be released as Docker images by the end of September.
+
+## Other features and improvements
+
+* A new wrapper for the [Siddhi](https://github.com/wso2/siddhi) CEP engine
+* various bug fixes and stability improvements
+* many UX improvements
+* many more, stay tuned for the next blog post!
+
+## Feedback
+
+We are absolutely open to your suggestions for further improvements! Let us know (by [mail](mailto:feedback@streampipes.org), [slack](https://slack.streampipes.org) or [twitter](https://www.twitter.com/streampipes)) and we'll consider your feature request in the next release!
+
+
+
+
+
+
diff --git a/documentation/website/blog/2018-11-24-container-cli.md b/documentation/website/blog/2018-11-24-container-cli.md
new file mode 100644
index 0000000..64894a1
--- /dev/null
+++ b/documentation/website/blog/2018-11-24-container-cli.md
@@ -0,0 +1,70 @@
+---
+title: "ContainerCLI: A CLI tool for your Docker Application"
+author: Philipp Zehnder
+authorURL: http://twitter.com/philipp10der
+authorImageURL: /img/zehnder.png
+---
+**<div style="float: left; padding-right: 40px;">6 minutes to read</div>**
+<br>
+
+In this blog post, we show how we developed a CLI tool to install and manage StreamPipes.
+In addition, we present a template project that enables you to easily create a CLI tool for your own docker application.
+All you need is a working docker-compose.yml file.
+
+<!--truncate-->
+
+StreamPipes has a microservice architecture and we use Docker and Docker Compose for all of our development, testing, and deployment processes.
+In the beginning, we developed the CLI only to ease the installation process (e.g. managing multiple versions), but over time we realized that the tool is much more powerful.
+For example, StreamPipes has multiple developer roles: Some developers are working on the frontend, others on the backend, and some are developing new algorithms and data sinks for StreamPipes.
+In the figure below, you can see different configurations that are used by different developers.
+All services are running in docker, except for the ones the developer is currently working on.
+As a result, we had many different docker-compose.yml files and when we made changes, we had to synchronize them all.
+
+<img class="blog-image" style="max-width:60%;" src="/blog/assets/2018-11-24/roles.png" alt="Three different developer roles">
+
+Therefore, our goal was to speed up the setup of a development environment, so that developers can focus on their specific programming task.
+That’s why we developed a command line tool that automates the setup of different environments.
+All what is needed is a docker-compose.yml file that describes all services of the application and the template CLI project from [GitHub](https://github.com/streampipes/container-cli).
+Afterwards, you need to split up your compose file and create an individual file for each service. Put those files in individual folders in the services folder.
+Each folder should have the same name as the service name in the docker-compose file. That’s it!
+Now you can use the CLI.
+With the default commands you are able to start, stop, and configure the application. But how is this better than just using docker itself?
+The advantage is the flexibility and extensibility of the tool.
+You can extend the script with tasks you have to perform regularly, like cleaning up volumes, or setting up testing environments.
+
+
+In the following we will describe the tool in more detail.
+You can see all files and folders of the CLI Tool in the next listing.
+The first file “container-cli.m4” contains the script.
+We use [Argbash](https://argbash.io/), which eases the use of bash script parameters and flags.
+In this file you can easily add new custom commands, flags or other functionalities.
+The next file is the actual bash script. It is generated by argbash.
+Since this file is generated automatically, you should not change anything.
+The docker-compose.yml file is the main file.
+This should contain all services, networks, or volume information that is relevant for all the other services of your application.
+All your other services are located in the services folder.
+For each service, create a folder with the name of the service and place a docker-compose.yml in the folder configuring the service.
+Therefore, you can copy and paste the configuration from your current docker-compose.yml.
+The script checks the services folder dynamically, this means you can add services at a later point in time without changing anything else.
+In our example, we have four services: Backend, UI, database, and a test database containing data for the end2end tests.
+Which services should be started when running the command **"container-cli start"** is defined in the active-service file.
+This file contains the names of all services, each in one line, that should be started.
+Often it is required to have multiple configurations of your app. You can save them in the templates folder.
+By running **“container-cli set-template ui_developer”**, you can select the template ui_developer.
+With this command, all services defined in the template are written to the active-services file. (e.g. the ui_developer template contains just the database and backend and the e2e-test template contains the backend, ui, and test-database).
+
+
+<img class="blog-image" style="max-width:30%;" src="/blog/assets/2018-11-24/files.png" alt="Structure of the containter cli project">
+
+
+The last file is the tmpl_env file.
+It contains configuration variables for the individual services.
+Those variables can also be changed by the bash script dynamically, because the tmpl_env file is moved to the .env file in the same folder.
+We use the configuration for example to dynamically set the IP Address of the host system or set the application version.
+To use a variable in a docker-compose file just write it in braces, as shown below.
+
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2018-11-24/variables.png" alt="How to use Environment Variables">
+
+We hope this tool is helpful for your use case.
+When you have any problems, feedback, or ideas for improvements, contact us or open an issue on [GitHub](https://github.com/streampipes/container-cli).
\ No newline at end of file
diff --git a/documentation/website/blog/2019-04-04-release-0610.md b/documentation/website/blog/2019-04-04-release-0610.md
new file mode 100644
index 0000000..1fc3353
--- /dev/null
+++ b/documentation/website/blog/2019-04-04-release-0610.md
@@ -0,0 +1,106 @@
+---
+title: StreamPipes release 0.61.0
+author: Dominik Riemer
+authorURL: http://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+**<div style="float: left; padding-right: 40px;">5 minutes to read</div>**
+<br>
+
+We are proud to release StreamPipes version 0.61.0, which brings many improvements and some exciting new features that simplify pipeline development!
+The new version, which is already available on Docker Hub and can be downloaded using our [installer](https://github.com/streampipes/streampipes-installer) brings over 20 improvements. Under the hood, a reworked event model simplifies the development of new pipeline elements using the built-in SDK.
+<!--truncate-->
+
+Our main mission is to make IoT data management & analytics accessible to everyone.
+This release focuses on better integrating StreamPipes with other systems and data sources and contains UX improvements to ease the development of pipelines.
+
+## StreamPipes Connect
+StreamPipes Connect, which aims to easily connect arbitrary data sources with StreamPipes, has been first introduced as a public beta in version 0.60.0.
+Version 0.61.0 brings many new features to Connect and also implements many bug fixes that now make Connect the recommended way to connect data sources.
+
+StreamPipes Connect relies on an extensible architecture to connect either generic protocols or specific adapters.
+
+In more detail, StreamPipes Connect supports the following protocols, with a completely code-free integration process:
+
+* Data Streams
+    * HTTP/REST
+    * MQTT
+    * Apache Kafka
+    * File
+    * ROS (preview)
+    * GDELT
+
+* Data Sets
+    * HTTP/REST
+    * HDFS
+    * File
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-04-04/connect.png" alt="StreamPipes Connect">
+
+The next version will also include an adapter for OPC UA, which is currently being tested in a real setting.
+
+For protocols that are not yet supported by StreamPipes Connect, a new Maven archetype for easily connecting data sources using the [SDK](/docs/dev-guide-tutorial-sources) is provided.
+
+## Improved Pipeline Modeling
+
+On the UI side, we try to make the graphical modeling of pipelines as simple as possible. Besides some minor improvements (e.g., case-insensitive search for pipeline elements), pipelines are now constantly validated and users get feedback on steps that are still missing (e.g., missing pipeline elements).
+As many other features we are currently implementing, this feature directly comes from a user request - so if you have any suggestions for improvement, let us know!
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-04-04/validation.png" alt="Pipeline Validation">
+
+
+## Reworked Event Model
+
+One of the most important features of v0.61.0 is not directly visible to end users, but drastically eases the development of new pipeline elements with the SDK.
+From this release on, events at runtime are represented in form of a new ``Event Object`` instead of the former ``Map`` representation.
+This feature allows for much better support of complex data processors which have more than one input data stream and comes with many convenience methods to better extract fields from an event for processing.
+
+For instance, prior to v0.61.0 you had to extract fields from an incoming event as follows:
+
+```java
+@Override
+  public void onEvent(Map<String, Object> in, String s, SpOutputCollector out) {
+       Integer numberValue = Integer.parseInt(String.valueOf(in.get(MAPPING_PROPERTY_ID)));
+  }
+```
+
+The new event model is much simpler and less error-prone:
+
+```java
+ public void onEvent(Event in, SpOutputCollector out) throws SpRuntimeException {
+    Integer numberValue = in
+        .getFieldBySelector(MAPPING_PROPERTY_SELECTOR)
+        .getAsPrimitive()
+        .getAsInteger();
+  }
+```
+
+The new event model is even more useful in cases where your data streams have a complex nested structure or contain list fields.
+If you connect two data streams containing the same fields to a processor, field names are automatically renamed in the background to avoid naming conflicts.
+A complete guide can be found in the [developer guide](/docs/dev-guide-event-model).
+
+## Documentation
+
+The developer guide has been extended:
+Besides reworked tutorials on developing new data processors and data sources, the new documentation includes instructions on adding [stream requirements](/docs/dev-guide-stream-requirements), the new [event model](/docs/dev-guide-event-model), and in-depth guides on defining [static properties](/docs/dev-guide-static-properties) and [output strategies](/docs/dev-guide-output-strategies).
+
+## Other features and improvements
+
+* SSL support: The UI can now be configured to support SSL, see the [documentation](/docs/dev-guide-ssl) on required configurations in your docker-compose file.
+* A new map visualization to visualize spatial data streams
+* The customize dialog is now hidden in case no static properties are present
+* Adapter descriptions can be exported and imported
+* The UI does no longer load any external sources
+* Updated Maven archetypes
+
+See the [release notes](https://github.com/streampipes/streampipes/releases/tag/0.61.0) for more details.
+
+## Feedback
+
+We are absolutely open to your suggestions for further improvements! Let us know (by [mail](mailto:feedback@streampipes.org), [slack](https://slack.streampipes.org) or [twitter](https://www.twitter.com/streampipes)) and we'll consider your feature request in the next release!
+
+
+
+
+
+
diff --git a/documentation/website/blog/2019-05-23-release-0620.md b/documentation/website/blog/2019-05-23-release-0620.md
new file mode 100644
index 0000000..8690c46
--- /dev/null
+++ b/documentation/website/blog/2019-05-23-release-0620.md
@@ -0,0 +1,67 @@
+---
+title: StreamPipes release 0.62.0
+author: Dominik Riemer
+authorURL: http://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+**<div style="float: left; padding-right: 40px;">6 minutes to read</div>**
+<br>
+
+StreamPipes version 0.62.0 has been released! While the last version brought many features under the hood, this new version totally focuses on features and improvements visible to end users.
+The new version is available on Docker Hub and can be downloaded using our [installer](https://github.com/streampipes/streampipes-installer).
+<!--truncate-->
+
+For this release, we focused on improving error handling in the pipeline editor, added more pipeline elements, added more adapters to StreamPipes Connect, and added documentation on available pipeline elements.
+
+## Pipeline Elements 
+Pipeline elements are the most important part of the StreamPipes toolbox. Although pipeline elements can be easily developed using our SDK, we aim to provide as many ready-to-use elements as possible to provide you with meaningful data harmonization and analytics features right from the beginning.
+StreamPipes v0.62.0 brings many new data processors and data sinks, an overview can be found [here](/docs/pipeline-elements). 
+
+## Pipeline Editor
+To better support users while developing pipelines, we improved the verification of user input in the pipeline editor.
+Users now get hints (directly shown in the pipeline assembly) on missing pipeline elements and error messages are shown if invalid values are entered while configuring pipeline elements (see the screenshot below).
+In addition, the editor now disallows adding new pipeline elements until the active pipeline element is fully configured; otherwise, the element is marked with a red alert symbol, which makes it much easier to discover modeling errors.
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-05-23/pipeline-editor.png" alt="Pipeline Element Verification">
+
+One feature request coming from one of our users was to better understand what pipeline elements are doing. For that reason, we added a feature that allows you to see the full documentation of any pipeline element directly in the pipeline editor.
+Once users start to configure a new pipeline element, the documentation can be opened:
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-05-23/pipeline-element-documentation.png" alt="Pipeline Element Documentation">
+
+
+## StreamPipes Connect
+We added more adapters to StreamPipes Connect. 
+Besides generic adapters for frequently used protocols such as OPC UA, MQTT and Kafka, Connect now also includes some new open data adapters and adapters for web APIs to make starting with StreamPipes easier and more fun:
+
+* A Slack source to connect Slack channels with StreamPipes
+* A Coindesk source that allows you to get the latest Bitcoin rates
+* A Wikipedia source to get the latest edits and created articles from Wikipedia
+* IEXCloud adapters to provide access to stock quotes and news from IEXCloud (an API token is required).
+* A ROS source to access data from the Robot Operating System (ROS)
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-05-23/connect.png" alt="StreamPipes Connect">
+
+
+## Documentation
+The documentation for pipeline elements that is shown in the pipeline editor is now also available in the online documentation.
+Navigate to [pipeline elements](/docs/pipeline-elements) to see an overview of all pipeline elements that are currently provided by StreamPipes.
+
+## Other features and improvements
+
+* Adapters can be sorted and filtered in StreamPipes Connect.
+* Labels and descriptions of pipeline elements can be provided by external resources files as a first step towards internationalization.
+* We upgraded the UI to the latest Angular version 7 and drastically reduced the size of the UI source bundle, which reduces loading time.
+* Stream requirements are now correctly ordered
+
+See the [release notes](https://github.com/streampipes/streampipes/releases/tag/0.62.0) for more details.
+
+## Feedback
+
+We are absolutely open to your suggestions for further improvements! Let us know (by [mail](mailto:feedback@streampipes.org), [slack](https://slack.streampipes.org) or [twitter](https://www.twitter.com/streampipes)) and we'll consider your feature request in the next release!
+
+
+
+
+
+
diff --git a/documentation/website/blog/2019-09-05-release-0630.md b/documentation/website/blog/2019-09-05-release-0630.md
new file mode 100644
index 0000000..88e4c81
--- /dev/null
+++ b/documentation/website/blog/2019-09-05-release-0630.md
@@ -0,0 +1,81 @@
+---
+title: StreamPipes release 0.63.0
+author: Dominik Riemer
+authorURL: https://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+**<div style="float: left; padding-right: 40px;">5 minutes to read</div>**
+<br>
+
+StreamPipes version 0.63.0 has been released! Following our release plan to alternate between user-faced and backend-oriented releases, this release contains many (more than 70) features and improvements, mainly on the backend side.
+The new version is available on Docker Hub and can be downloaded using our [installer](https://github.com/streampipes/streampipes-installer).
+<!--truncate-->
+
+For this release, we focused on improving the stability and resilience of StreamPipes, adding many new features "under the hood".
+
+## Backend & Data Model
+
+* First, v0.63.0 brings **support for binary messaging formats**. Prior to 0.63.0, the only messaging format used at runtime was JSON. Now, several other binary formats are supported: Smile and CBOR as binary JSON representations and FST, a fast Java-based serializer.
+* Second, we improved **resilience of pipelines**. Now, when StreamPipes is stopped, all running pipelines are stopped beforehand and can be restarted later on, so that you don't end up with orphaned pipelines anymore. 
+
+We also extended the data model and SDK with new features:
+
+* Pipeline elements can now provide their own assets such as documentation. Simply add an icon and a markdown file containing the documentation of the pipeline element and both will be visible directly in the pipeline editor after the installation of the pipeline element.
+* We added several new static properties to define required user input: **SecretStaticProperties** can be used to define secret values (e.g., passwords). In addition, **StaticPropertyAlternatives** were introduced to let users select between multiple combinations of static properties (e.g., a time window and a count window along with window settings).
+
+In addition, we migrated all pipeline element containers and the backend to Spring Boot applications. The triple store used to store pipeline element descriptions has been extracted into its own microservice. Btw, have you seen the new [helm chart](https://www.github.com/streampipes/streampipes-helm-chart) that simplifies running StreamPipes on Kubernetes?
+ 
+## UI and Pipeline Editor
+We improved the **interactive user guide**, so that it now depends on pipeline elements that are also available in the lite version.
+Three different interactive guides exist: One that teaches you how to create pipelines, another one that illustrates how to connect data with StreamPipes connect and a third one that shows how to create live visualizations using the built-in dashboard.
+
+As **user feedback** is absolutely important to help us improve StreamPipes, we added a feature that allows users to directly give feedback from the StreamPipes UI. Simply click the feedback icon in the top navigation bar and submit your anonymous feedback to us!
+
+Finally, there are also some functional improvements in the pipeline editor: **Pipeline validation** has improved and gives direct feedback. And you can now also add multiple dashboard sinks to a single pipeline :-)
+
+And a nice feature you can't see but which you'll probably feel: All UI files are now **Gzip compressed** by default, so that StreamPipes will now load much faster. 
+
+## Pipeline Elements
+
+StreamPipes 0.63.0 includes several new pipeline elements:
+* A sink for Apache IoTDB
+* A sink for PostgreSQL
+* A processor to convert boolean values
+* A processor to extract timestamps
+* A processor to compute trigonometric functions
+
+## StreamPipes Connect
+We added more adapters and improved some adapters to StreamPipes Connect:
+
+* An improved MQTT adapter that can handle authentication
+* A new MySQL adapter
+* An improved OPC-UA adapter
+* A new random data generator that eases testing of pipeline elements
+
+In addition, we completely reworked the Connect UI. The schema view now lets you edit events more conveniently and timestamp fields can be directly marked in the UI.
+
+
+## Documentation
+We updated and restructured the user guide, which now consists of four parts: [Introduction](/docs/user-guide-introduction), [Tour](/docs/user-guide-tour), [Installation](/docs/user-guide-installation) and [First Steps](/docs/user-guide-first-steps).
+We also updated all screenshots to reflect the current look and feel of StreamPipes.
+
+In addition, the developer guide was further extended (e.g., there is now a new tutorial on creating data sinks). Maven archetypes are now the recommended way to create new pipeline elements.
+
+
+## Other features and improvements
+
+* An improved installer that lets you choose between two StreamPipes versions for different hardware setups (full or lite, previously named big data and desktop)
+* We updated some core libraries like Apache Flink and Apache Kafka to newer versions.
+* Improved support for Firefox and Edge: Besides Chrome, StreamPipes should also behave and look similar in Firefox and Edge browsers.
+
+See the [release notes](https://github.com/streampipes/streampipes/releases/tag/0.63.0) for a complete list of new features and improvements.
+
+## Feedback
+
+We are absolutely open to your suggestions for further improvements! Let us know (by [mail](mailto:feedback@streampipes.org), [slack](https://slack.streampipes.org) or [twitter](https://www.twitter.com/streampipes)) and we'll consider your feature request in the next release!
+
+
+
+
+
+
diff --git a/documentation/website/blog/2019-09-19-release-0640.md b/documentation/website/blog/2019-09-19-release-0640.md
new file mode 100644
index 0000000..f7a2ab6
--- /dev/null
+++ b/documentation/website/blog/2019-09-19-release-0640.md
@@ -0,0 +1,51 @@
+---
+title: StreamPipes release 0.64.0
+author: Dominik Riemer
+authorURL: https://twitter.com/dominikjriemer
+authorImageURL: /img/riemer.png
+---
+**<div style="float: left; padding-right: 40px;">5 minutes to read</div>**
+<br>
+
+Usually, we try to publish a new StreamPipes release every three months. But after attending a very exciting [ApacheCon](https://www.apachecon.com/acna19/) last week, where we worked with some Apache communities on a few really cool new features, we decided to release these features as soon as possible.
+So here's StreamPipes 0.64.0!
+<!--truncate-->
+
+## StreamPipes Connect
+
+* StreamPipes Connect now has a new (experimental) connector for [Apache PLC4X](https://plc4x.apache.org/). PLC4X is a (really cool) universal protocol adapter for the Industrial IoT with many adapters for systems like Modbus, Beckhoff, S7 and OPC-UA. The new StreamPipes Connect adapter allows you to easily connect to an S7 device; we will add more integrations in the upcoming releases.
+* We added another adapter to retrieve data from the [Apache Pulsar](https://pulsar.apache.org) messaging system.
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-09-19/spconnect.png" alt="PLC4X adapter for StreamPipes Connect">
+
+## Pipeline Elements
+
+* 0.64.0 also includes a new data sink to send data to Apache Pulsar.
+
+## Deployment
+
+StreamPipes relies on a microservice-based architecture and therefore requires quite a few services (> 15 for the full version) to be up and running. 
+This has an impact on the memory consumption of the server where StreamPipes is running. On the other hand, we want to make it as easy as possible to try StreamPipes even on laptops with less powerful hardware.
+
+However, the lite version still required more than 8 GB of memory and the full version even more. Additionally, after the last release, we received feedback from the community indicating that the memory consumption had significantly increased.
+So we looked deeper into the issue and discovered that the Docker base images we were using to deliver the StreamPipes services caused high memory consumption.
+
+Before StreamPipes 0.63.0, we used the Alpine Oracle JDK image for most services. In 0.63.0, we switched to an OpenJDK/Alpine distribution. This had an enormous effect on memory, having individual services that reserved more than 1.5GB of memory.
+
+So in this version, we switched to AdoptOpenJDK along with [OpenJ9](https://www.eclipse.org/openj9/). The results are fantastic: The full version including all pipeline elements now needs only 6 GB of memory (compared to more than 16 GB in the last version).
+
+The screenshot below shows that StreamPipes now is much less resource hungry:
+
+<img class="blog-image" style="max-width:90%;" src="/blog/assets/2019-09-19/memory.png" alt="Memory consumption of StreamPipes services">
+
+In future versions, we will continue our efforts to decrease the memory consumption of StreamPipes.
+
+## Feedback
+
+We are absolutely open to your suggestions for further improvements! Let us know (by [mail](mailto:feedback@streampipes.org), [slack](https://slack.streampipes.org) or [twitter](https://www.twitter.com/streampipes)) and we'll consider your feature request in the next release!
+
+
+
+
+
+
diff --git a/documentation/website/blog/assets/2018-06-18/01_motivation.png b/documentation/website/blog/assets/2018-06-18/01_motivation.png
new file mode 100644
index 0000000..59819fb
Binary files /dev/null and b/documentation/website/blog/assets/2018-06-18/01_motivation.png differ
diff --git a/documentation/website/blog/assets/2018-06-18/02_anlage.png b/documentation/website/blog/assets/2018-06-18/02_anlage.png
new file mode 100644
index 0000000..5415fd6
Binary files /dev/null and b/documentation/website/blog/assets/2018-06-18/02_anlage.png differ
diff --git a/documentation/website/blog/assets/2018-06-18/03_architecture.png b/documentation/website/blog/assets/2018-06-18/03_architecture.png
new file mode 100644
index 0000000..a7ce916
Binary files /dev/null and b/documentation/website/blog/assets/2018-06-18/03_architecture.png differ
diff --git a/documentation/website/blog/assets/2018-06-18/04_pipeline.png b/documentation/website/blog/assets/2018-06-18/04_pipeline.png
new file mode 100644
index 0000000..f2cf2d9
Binary files /dev/null and b/documentation/website/blog/assets/2018-06-18/04_pipeline.png differ
diff --git a/documentation/website/blog/assets/2018-09-17/streampipes-060-product-tour.png b/documentation/website/blog/assets/2018-09-17/streampipes-060-product-tour.png
new file mode 100644
index 0000000..6ab8d66
Binary files /dev/null and b/documentation/website/blog/assets/2018-09-17/streampipes-060-product-tour.png differ
diff --git a/documentation/website/blog/assets/2018-09-17/streampipes-060.png b/documentation/website/blog/assets/2018-09-17/streampipes-060.png
new file mode 100644
index 0000000..f5e4ff9
Binary files /dev/null and b/documentation/website/blog/assets/2018-09-17/streampipes-060.png differ
diff --git a/documentation/website/blog/assets/2018-11-24/files.png b/documentation/website/blog/assets/2018-11-24/files.png
new file mode 100644
index 0000000..7a2daf5
Binary files /dev/null and b/documentation/website/blog/assets/2018-11-24/files.png differ
diff --git a/documentation/website/blog/assets/2018-11-24/roles.png b/documentation/website/blog/assets/2018-11-24/roles.png
new file mode 100644
index 0000000..b55924e
Binary files /dev/null and b/documentation/website/blog/assets/2018-11-24/roles.png differ
diff --git a/documentation/website/blog/assets/2018-11-24/variables.png b/documentation/website/blog/assets/2018-11-24/variables.png
new file mode 100644
index 0000000..39c1cc3
Binary files /dev/null and b/documentation/website/blog/assets/2018-11-24/variables.png differ
diff --git a/documentation/website/blog/assets/2019-04-04/connect.png b/documentation/website/blog/assets/2019-04-04/connect.png
new file mode 100644
index 0000000..106f3bd
Binary files /dev/null and b/documentation/website/blog/assets/2019-04-04/connect.png differ
diff --git a/documentation/website/blog/assets/2019-04-04/validation.png b/documentation/website/blog/assets/2019-04-04/validation.png
new file mode 100644
index 0000000..0578991
Binary files /dev/null and b/documentation/website/blog/assets/2019-04-04/validation.png differ
diff --git a/documentation/website/blog/assets/2019-05-23/connect.png b/documentation/website/blog/assets/2019-05-23/connect.png
new file mode 100644
index 0000000..38dfed8
Binary files /dev/null and b/documentation/website/blog/assets/2019-05-23/connect.png differ
diff --git a/documentation/website/blog/assets/2019-05-23/pipeline-editor.png b/documentation/website/blog/assets/2019-05-23/pipeline-editor.png
new file mode 100644
index 0000000..a75e96e
Binary files /dev/null and b/documentation/website/blog/assets/2019-05-23/pipeline-editor.png differ
diff --git a/documentation/website/blog/assets/2019-05-23/pipeline-element-documentation.png b/documentation/website/blog/assets/2019-05-23/pipeline-element-documentation.png
new file mode 100644
index 0000000..8ba445e
Binary files /dev/null and b/documentation/website/blog/assets/2019-05-23/pipeline-element-documentation.png differ
diff --git a/documentation/website/blog/assets/2019-09-19/memory.png b/documentation/website/blog/assets/2019-09-19/memory.png
new file mode 100644
index 0000000..4eceb8e
Binary files /dev/null and b/documentation/website/blog/assets/2019-09-19/memory.png differ
diff --git a/documentation/website/blog/assets/2019-09-19/spconnect.png b/documentation/website/blog/assets/2019-09-19/spconnect.png
new file mode 100644
index 0000000..30ed342
Binary files /dev/null and b/documentation/website/blog/assets/2019-09-19/spconnect.png differ
diff --git a/documentation/website/core/Footer.js b/documentation/website/core/Footer.js
new file mode 100644
index 0000000..cf12aa4
--- /dev/null
+++ b/documentation/website/core/Footer.js
@@ -0,0 +1,107 @@
+/**
+ * Copyright (c) 2017-present, Facebook, Inc.
+ *
+ * This source code is licensed under the MIT license found in the
+ * LICENSE file in the root directory of this source tree.
+ */
+
+const React = require('react');
+
+// Site footer component (Docusaurus v1 style): renders a sitemap with
+// Docs, Community, and More link columns, plus the copyright section.
+// All URLs are built from the site configuration passed via this.props.config.
+class Footer extends React.Component {
+    // Builds a link to a docs page: baseUrl + 'docs/' + optional
+    // language segment + doc id (e.g. '/docs/en/my-doc').
+    docUrl(doc, language) {
+        const baseUrl = this.props.config.baseUrl;
+        return baseUrl + 'docs/' + (language ? language + '/' : '') + doc;
+    }
+
+    // Builds a link to a top-level page (no 'docs/' segment), with an
+    // optional language prefix.
+    // NOTE(review): neither docUrl nor pageUrl is called in render() below —
+    // the links there concatenate baseUrl inline instead; consider unifying.
+    pageUrl(doc, language) {
+        const baseUrl = this.props.config.baseUrl;
+        return baseUrl + (language ? language + '/' : '') + doc;
+    }
+
+    render() {
+        // NOTE(review): currentYear is computed but never used; the copyright
+        // text comes fully formed from this.props.config.copyright.
+        const currentYear = new Date().getFullYear();
+        return (
+            <div>
+                <footer className="nav-footer" id="footer">
+                    <section className="sitemap">
+                        {/* Footer logo linking back to the site root; only
+                            rendered when footerIcon is configured. */}
+                        <a href={this.props.config.baseUrl} className="nav-home">
+                            {this.props.config.footerIcon && (
+                                <img
+                                    src={this.props.config.baseUrl + this.props.config.footerIcon}
+                                    alt={this.props.config.title}
+                                    width="66"
+                                />
+                            )}
+                        </a>
+                        {/* Column 1: documentation and legal links. */}
+                        <div>
+                            <h5>Docs</h5>
+                            <a href={this.props.config.baseUrl + 'docs/user-guide-introduction'}>
+                                User Guide
+                            </a>
+                            <a href={this.props.config.baseUrl + 'docs/dev-guide-introduction'}>
+                                Developer Guide
+                            </a>
+                            <a href={this.props.config.baseUrl + 'docs/faq-common-problems'}> 
+                                Frequently Asked Questions
+                            </a>
+                            <a href={this.props.config.baseUrl + 'docs/privacy'}> 
+                                Privacy Notice
+                            </a>
+                            <a href="https://www.fzi.de/footer/impressum/">
+                                Imprint
+                            </a>
+                        </div>
+                        {/* Column 2: external community channels. */}
+                        <div>
+                            <h5>Community</h5>
+                            <a
+                                href="http://stackoverflow.com/questions/tagged/streampipes"
+                                target="_blank"
+                                rel="noreferrer noopener">
+                                Stack Overflow
+                            </a>
+                            <a
+                                href="https://twitter.com/streampipes"
+                                target="_blank"
+                                rel="noreferrer noopener">
+                                Twitter
+                            </a>
+                            <a
+                                href="https://github.com/streampipes"
+                                target="_blank"
+                                rel="noreferrer noopener">
+                                Github
+                            </a>
+                            <a
+                                href="https://hub.docker.com/u/streampipes"
+                                target="_blank"
+                                rel="noreferrer noopener">
+                                Docker Hub
+                            </a>
+                        </div>
+                        {/* Column 3: blog, website, and GitHub star button
+                            (enhanced by the external github-buttons script). */}
+                        <div>
+                            <h5>More</h5>
+                            <a href={this.props.config.baseUrl + 'blog'}>Blog</a>
+                            <a href="https://www.streampipes.org">Website</a>
+                            <a
+                                className="github-button"
+                                href={this.props.config.repoUrl}
+                                data-icon="octicon-star"
+                                data-count-href="/streampipes/streampipes/stargazers"
+                                data-show-count={true}
+                                data-count-aria-label="# stargazers on GitHub"
+                                aria-label="Star this project on GitHub">
+                                Star
+                            </a>
+                        </div>
+                    </section>
+
+                    <section className="copyright">{this.props.config.copyright}</section>
+                </footer>
+            </div>
+
+        );
+    }
+
+}
+
+module.exports = Footer;
diff --git a/documentation/website/i18n/en.json b/documentation/website/i18n/en.json
new file mode 100644
index 0000000..1f6cda9
--- /dev/null
+++ b/documentation/website/i18n/en.json
@@ -0,0 +1,724 @@
+{
+  "_comment": "This file is auto-generated by write-translations.js",
+  "localized-strings": {
+    "next": "Next",
+    "previous": "Previous",
+    "tagline": "Self-Service Data Analytics",
+    "docs": {
+      "dev-guide-archetype": {
+        "title": "Start Developing",
+        "sidebar_label": "Start Developing"
+      },
+      "dev-guide-architecture": {
+        "title": "Architecture",
+        "sidebar_label": "Architecture"
+      },
+      "dev-guide-configuration": {
+        "title": "Configuration",
+        "sidebar_label": "Configuration"
+      },
+      "dev-guide-development-environment": {
+        "title": "Development Environment",
+        "sidebar_label": "Development Environment"
+      },
+      "dev-guide-event-model": {
+        "title": "SDK Guide: Event Model",
+        "sidebar_label": "Event Model"
+      },
+      "dev-guide-introduction": {
+        "title": "Introduction",
+        "sidebar_label": "Introduction"
+      },
+      "dev-guide-migration": {
+        "title": "Migration Guide v0.61",
+        "sidebar_label": "Migration Guide"
+      },
+      "dev-guide-output-strategies": {
+        "title": "SDK Guide: Output Strategies",
+        "sidebar_label": "Output Strategies"
+      },
+      "dev-guide-sdk-guide-processors": {
+        "title": "SDK Guide: Data Processors",
+        "sidebar_label": "SDK Guide: Data Processors"
+      },
+      "dev-guide-sdk-guide-sinks": {
+        "title": "SDK Guide: Data Sinks",
+        "sidebar_label": "SDK Guide: Data Sinks"
+      },
+      "dev-guide-sdk-guide-sources": {
+        "title": "SDK Guide: Data Sources",
+        "sidebar_label": "SDK Guide: Data Sources"
+      },
+      "dev-guide-ssl": {
+        "title": "Use SSL",
+        "sidebar_label": "Use SSL"
+      },
+      "dev-guide-static-properties": {
+        "title": "SDK Guide: Static Properties",
+        "sidebar_label": "Static Properties"
+      },
+      "dev-guide-stream-requirements": {
+        "title": "SDK Guide: Stream Requirements",
+        "sidebar_label": "Stream Requirements"
+      },
+      "dev-guide-tutorial-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "dev-guide-tutorial-sinks": {
+        "title": "Tutorial: Data Sinks",
+        "sidebar_label": "Tutorial: Data Sinks"
+      },
+      "dev-guide-tutorial-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      },
+      "faq-common-problems": {
+        "title": "Common Problems",
+        "sidebar_label": "Common Problems"
+      },
+      "license": {
+        "title": "license"
+      },
+      "org.streampipes.processor.geo.flink/org.streampipes.processor.geo.flink": {
+        "title": "Spatial Grid Enrichment",
+        "sidebar_label": "Spatial Grid Enrichment"
+      },
+      "org.streampipes.processor.imageclassification.jvm.generic-image-classification/org.streampipes.processor.imageclassification.jvm.generic-image-classification": {
+        "title": "Generic Image Classification",
+        "sidebar_label": "Generic Image Classification"
+      },
+      "org.streampipes.processor.imageclassification.jvm.image-enricher/org.streampipes.processor.imageclassification.jvm.image-enricher": {
+        "title": "Image Enricher",
+        "sidebar_label": "Image Enricher"
+      },
+      "org.streampipes.processor.imageclassification.qrcode/org.streampipes.processor.imageclassification.qrcode": {
+        "title": "QR Code Reader",
+        "sidebar_label": "QR Code Reader"
+      },
+      "org.streampipes.processors.aggregation.flink.aggregation/org.streampipes.processors.aggregation.flink.aggregation": {
+        "title": "Aggregation",
+        "sidebar_label": "Aggregation"
+      },
+      "org.streampipes.processors.aggregation.flink.rate/org.streampipes.processors.aggregation.flink.rate": {
+        "title": "Event Rate",
+        "sidebar_label": "Event Rate"
+      },
+      "org.streampipes.processors.enricher.flink.processor.math.mathop/org.streampipes.processors.enricher.flink.processor.math.mathop": {
+        "title": "Math",
+        "sidebar_label": "Math"
+      },
+      "org.streampipes.processors.enricher.flink.processor.math.staticmathop/org.streampipes.processors.enricher.flink.processor.math.staticmathop": {
+        "title": "Static Math",
+        "sidebar_label": "Static Math"
+      },
+      "org.streampipes.processors.enricher.flink.processor.trigonometry/org.streampipes.processors.enricher.flink.processor.trigonometry": {
+        "title": "Trigonometry Functions",
+        "sidebar_label": "Trigonometry Functions"
+      },
+      "org.streampipes.processors.enricher.flink.processor.urldereferencing/org.streampipes.processors.enricher.flink.processor.urldereferencing": {
+        "title": "URL Dereferencing",
+        "sidebar_label": "URL Dereferencing"
+      },
+      "org.streampipes.processors.enricher.flink.timestamp/org.streampipes.processors.enricher.flink.timestamp": {
+        "title": "Timestamp Enricher",
+        "sidebar_label": "Timestamp Enricher"
+      },
+      "org.streampipes.processors.filters.jvm.compose/org.streampipes.processors.filters.jvm.compose": {
+        "title": "Compose",
+        "sidebar_label": "Compose"
+      },
+      "org.streampipes.processors.filters.jvm.numericalfilter/org.streampipes.processors.filters.jvm.numericalfilter": {
+        "title": "Numerical Filter",
+        "sidebar_label": "Numerical Filter"
+      },
+      "org.streampipes.processors.filters.jvm.project/org.streampipes.processors.filters.jvm.project": {
+        "title": "Projection",
+        "sidebar_label": "Projection"
+      },
+      "org.streampipes.processors.filters.jvm.textfilter/org.streampipes.processors.filters.jvm.textfilter": {
+        "title": "Text Filter",
+        "sidebar_label": "Text Filter"
+      },
+      "org.streampipes.processors.pattern-detection.flink.peak-detection/org.streampipes.processors.pattern-detection.flink.peak-detection": {
+        "title": "Peak Detection",
+        "sidebar_label": "Peak Detection"
+      },
+      "org.streampipes.processors.siddhi.frequency/org.streampipes.processors.siddhi.frequency": {
+        "title": "Frequency Calculation",
+        "sidebar_label": "Frequency Calculation"
+      },
+      "org.streampipes.processors.siddhi.frequencychange/org.streampipes.processors.siddhi.frequencychange": {
+        "title": "Frequency Change",
+        "sidebar_label": "Frequency Change"
+      },
+      "org.streampipes.processors.siddhi.increase/org.streampipes.processors.siddhi.increase": {
+        "title": "Trend",
+        "sidebar_label": "Trend"
+      },
+      "org.streampipes.processors.siddhi.numericalfilter/org.streampipes.processors.siddhi.numericalfilter": {
+        "title": "Numerical Filter (Siddhi)",
+        "sidebar_label": "Numerical Filter (Siddhi)"
+      },
+      "org.streampipes.processors.siddhi.sequence/org.streampipes.processors.siddhi.sequence": {
+        "title": "Sequence Detection",
+        "sidebar_label": "Sequence Detection"
+      },
+      "org.streampipes.processors.siddhi.stop/org.streampipes.processors.siddhi.stop": {
+        "title": "Stream Stop Detection",
+        "sidebar_label": "Stream Stop Detection"
+      },
+      "org.streampipes.processors.textmining.flink.languagedetection/org.streampipes.processors.textmining.flink.languagedetection": {
+        "title": "Language Detection",
+        "sidebar_label": "Language Detection"
+      },
+      "org.streampipes.processors.textmining.flink.wordcount/org.streampipes.processors.textmining.flink.wordcount": {
+        "title": "Word Count",
+        "sidebar_label": "Word Count"
+      },
+      "org.streampipes.processors.transformation.flink.field-converter/org.streampipes.processors.transformation.flink.field-converter": {
+        "title": "Field Converter",
+        "sidebar_label": "Field Converter"
+      },
+      "org.streampipes.processors.transformation.flink.field-mapper/org.streampipes.processors.transformation.flink.field-mapper": {
+        "title": "Field Mapper",
+        "sidebar_label": "Field Mapper"
+      },
+      "org.streampipes.processors.transformation.flink.field-renamer/org.streampipes.processors.transformation.flink.field-renamer": {
+        "title": "Field Renamer",
+        "sidebar_label": "Field Renamer"
+      },
+      "org.streampipes.processors.transformation.flink.fieldhasher/org.streampipes.processors.transformation.flink.fieldhasher": {
+        "title": "Field Hasher",
+        "sidebar_label": "Field Hasher"
+      },
+      "org.streampipes.processors.transformation.flink.measurement-unit-converter/org.streampipes.processors.transformation.flink.measurement-unit-converter": {
+        "title": "Measurement Unit Converter",
+        "sidebar_label": "Measurement Unit Converter"
+      },
+      "org.streampipes.processors.transformation.flink.processor.boilerplate/org.streampipes.processors.transformation.flink.processor.boilerplate": {
+        "title": "Boilerplate Removal",
+        "sidebar_label": "Boilerplate Removal"
+      },
+      "org.streampipes.processors.transformation.jvm.changed-value/org.streampipes.processors.transformation.jvm.changed-value": {
+        "title": "Value Changed",
+        "sidebar_label": "Value Changed"
+      },
+      "org.streampipes.processors.transformation.jvm.count-array/org.streampipes.processors.transformation.jvm.count-array": {
+        "title": "Count Array",
+        "sidebar_label": "Count Array"
+      },
+      "org.streampipes.processors.transformation.jvm.duration-value/org.streampipes.processors.transformation.jvm.duration-value": {
+        "title": "Calculate Duration",
+        "sidebar_label": "Calculate Duration"
+      },
+      "org.streampipes.processors.transformation.jvm.processor.timestampextractor/org.streampipes.processors.transformation.jvm.processor.timestampextractor": {
+        "title": "Timestamp Extractor",
+        "sidebar_label": "Timestamp Extractor"
+      },
+      "org.streampipes.processors.transformation.jvm.split-array/org.streampipes.processors.transformation.jvm.split-array": {
+        "title": "Split Array",
+        "sidebar_label": "Split Array"
+      },
+      "org.streampipes.sinks.brokers.jvm.jms/org.streampipes.sinks.brokers.jvm.jms": {
+        "title": "JMS Publisher",
+        "sidebar_label": "JMS Publisher"
+      },
+      "org.streampipes.sinks.brokers.jvm.kafka/org.streampipes.sinks.brokers.jvm.kafka": {
+        "title": "Kafka Publisher",
+        "sidebar_label": "Kafka Publisher"
+      },
+      "org.streampipes.sinks.brokers.jvm.rabbitmq/org.streampipes.sinks.brokers.jvm.rabbitmq": {
+        "title": "RabbitMQ Publisher",
+        "sidebar_label": "RabbitMQ Publisher"
+      },
+      "org.streampipes.sinks.brokers.jvm.rest/org.streampipes.sinks.brokers.jvm.rest": {
+        "title": "REST Publisher",
+        "sidebar_label": "REST Publisher"
+      },
+      "org.streampipes.sinks.databases.flink.elasticsearch/org.streampipes.sinks.databases.flink.elasticsearch": {
+        "title": "Elasticsearch",
+        "sidebar_label": "Elasticsearch"
+      },
+      "org.streampipes.sinks.databases.jvm.couchdb/org.streampipes.sinks.databases.jvm.couchdb": {
+        "title": "CouchDB",
+        "sidebar_label": "CouchDB"
+      },
+      "org.streampipes.sinks.databases.jvm.influxdb/org.streampipes.sinks.databases.jvm.influxdb": {
+        "title": "InfluxDB",
+        "sidebar_label": "InfluxDB"
+      },
+      "org.streampipes.sinks.databases.jvm.postgresql/org.streampipes.sinks.databases.jvm.postgresql": {
+        "title": "PostgreSQL",
+        "sidebar_label": "PostgreSQL"
+      },
+      "org.streampipes.sinks.internal.jvm.dashboard/org.streampipes.sinks.internal.jvm.dashboard": {
+        "title": "Dashboard Sink",
+        "sidebar_label": "Dashboard Sink"
+      },
+      "org.streampipes.sinks.internal.jvm.notification/org.streampipes.sinks.internal.jvm.notification": {
+        "title": "Notification",
+        "sidebar_label": "Notification"
+      },
+      "org.streampipes.sinks.notifications.jvm.email/org.streampipes.sinks.notifications.jvm.email": {
+        "title": "Email Notification",
+        "sidebar_label": "Email Notification"
+      },
+      "org.streampipes.sinks.notifications.jvm.onesignal/org.streampipes.sinks.notifications.jvm.onesignal": {
+        "title": "OneSignal",
+        "sidebar_label": "OneSignal"
+      },
+      "org.streampipes.sinks.notifications.jvm.slack/org.streampipes.sinks.notifications.jvm.slack": {
+        "title": "Slack Notification",
+        "sidebar_label": "Slack Notification"
+      },
+      "pipeline-elements": {
+        "title": "Overview",
+        "sidebar_label": "Overview"
+      },
+      "privacy": {
+        "title": "Datenschutzerklärung",
+        "sidebar_label": "Privacy Notice"
+      },
+      "user-guide-first-steps": {
+        "title": "First steps",
+        "sidebar_label": "First steps"
+      },
+      "user-guide-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "user-guide-introduction": {
+        "title": "Introduction",
+        "sidebar_label": "Introduction"
+      },
+      "user-guide-software-components": {
+        "title": "Software Components",
+        "sidebar_label": "Software Components"
+      },
+      "user-guide-tour": {
+        "title": "Tour",
+        "sidebar_label": "Tour"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-architecture": {
+        "title": "Architecture",
+        "sidebar_label": "Architecture"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-development-environment": {
+        "title": "Development Environment",
+        "sidebar_label": "Development Environment"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-introduction": {
+        "title": "Introduction",
+        "sidebar_label": "Introduction"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-sdk-guide-processors": {
+        "title": "SDK Guide Processors",
+        "sidebar_label": "SDK Guide Processors"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-sdk-guide-sinks": {
+        "title": "SDK Guide Sinks",
+        "sidebar_label": "SDK Guide Sinks"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-sdk-guide-sources": {
+        "title": "SDK Guide Sources",
+        "sidebar_label": "SDK Guide Sources"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-tutorial-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.55.2/version-0.55.2-dev-guide-tutorial-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      },
+      "version-0.55.2/version-0.55.2-faq-common-problems": {
+        "title": "Common Problems",
+        "sidebar_label": "Common Problems"
+      },
+      "version-0.55.2/version-0.55.2-pipeline-elements": {
+        "title": "Pipeline Elements",
+        "sidebar_label": "Pipeline Elements"
+      },
+      "version-0.55.2/version-0.55.2-privacy": {
+        "title": "Datenschutzerklärung",
+        "sidebar_label": "Privacy Notice"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-getting-started": {
+        "title": "Getting Started",
+        "sidebar_label": "Getting Started"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-introduction": {
+        "title": "Introduction",
+        "sidebar_label": "Introduction"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-processing-elements": {
+        "title": "Processing Elements",
+        "sidebar_label": "Processing Elements"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-software-components": {
+        "title": "Software Components",
+        "sidebar_label": "Software Components"
+      },
+      "version-0.55.2/version-0.55.2-user-guide-tour": {
+        "title": "Tour",
+        "sidebar_label": "Tour"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-archetype": {
+        "title": "Start Developing",
+        "sidebar_label": "Start Developing"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-architecture": {
+        "title": "Architecture",
+        "sidebar_label": "Architecture"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-configuration": {
+        "title": "Configuration",
+        "sidebar_label": "Configuration"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-development-environment": {
+        "title": "Development Environment",
+        "sidebar_label": "Development Environment"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-event-model": {
+        "title": "SDK Guide: Event Model",
+        "sidebar_label": "Event Model"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-migration": {
+        "title": "Migration Guide v0.61",
+        "sidebar_label": "Migration Guide"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-output-strategies": {
+        "title": "SDK Guide: Output Strategies",
+        "sidebar_label": "Output Strategies"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-sdk-guide-processors": {
+        "title": "SDK Guide: Data Processors",
+        "sidebar_label": "SDK Guide: Data Processors"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-sdk-guide-sinks": {
+        "title": "SDK Guide: Data Sinks",
+        "sidebar_label": "SDK Guide: Data Sinks"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-ssl": {
+        "title": "Use SSL",
+        "sidebar_label": "Use SSL"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-static-properties": {
+        "title": "SDK Guide: Static Properties",
+        "sidebar_label": "Static Properties"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-stream-requirements": {
+        "title": "SDK Guide: Stream Requirements",
+        "sidebar_label": "Stream Requirements"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-tutorial-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.61.0/version-0.61.0-dev-guide-tutorial-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      },
+      "version-0.61.0/version-0.61.0-faq-common-problems": {
+        "title": "Common Problems",
+        "sidebar_label": "Common Problems"
+      },
+      "version-0.61.0/version-0.61.0-pipeline-elements": {
+        "title": "Pipeline Elements",
+        "sidebar_label": "Pipeline Elements"
+      },
+      "version-0.61.0/version-0.61.0-user-guide-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.61.0/version-0.61.0-user-guide-processing-elements": {
+        "title": "Processing Elements",
+        "sidebar_label": "Processing Elements"
+      },
+      "version-0.61.0/version-0.61.0-user-guide-software-components": {
+        "title": "Software Components",
+        "sidebar_label": "Software Components"
+      },
+      "version-0.62.0/org.streampipes.processor.geo.flink/version-0.62.0-org.streampipes.processor.geo.flink": {
+        "title": "Spatial Grid Enrichment",
+        "sidebar_label": "Spatial Grid Enrichment"
+      },
+      "version-0.62.0/org.streampipes.processor.imageclassification.jvm.generic-image-classification/version-0.62.0-org.streampipes.processor.imageclassification.jvm.generic-image-classification": {
+        "title": "Generic Image Classification",
+        "sidebar_label": "Generic Image Classification"
+      },
+      "version-0.62.0/org.streampipes.processor.imageclassification.jvm.image-enricher/version-0.62.0-org.streampipes.processor.imageclassification.jvm.image-enricher": {
+        "title": "Image Enricher",
+        "sidebar_label": "Image Enricher"
+      },
+      "version-0.62.0/org.streampipes.processor.imageclassification.qrcode/version-0.62.0-org.streampipes.processor.imageclassification.qrcode": {
+        "title": "QR Code Reader",
+        "sidebar_label": "QR Code Reader"
+      },
+      "version-0.62.0/org.streampipes.processors.aggregation.flink.aggregation/version-0.62.0-org.streampipes.processors.aggregation.flink.aggregation": {
+        "title": "Aggregation",
+        "sidebar_label": "Aggregation"
+      },
+      "version-0.62.0/org.streampipes.processors.aggregation.flink.rate/version-0.62.0-org.streampipes.processors.aggregation.flink.rate": {
+        "title": "Event Rate",
+        "sidebar_label": "Event Rate"
+      },
+      "version-0.62.0/org.streampipes.processors.enricher.flink.processor.math.mathop/version-0.62.0-org.streampipes.processors.enricher.flink.processor.math.mathop": {
+        "title": "Math",
+        "sidebar_label": "Math"
+      },
+      "version-0.62.0/org.streampipes.processors.enricher.flink.processor.math.staticmathop/version-0.62.0-org.streampipes.processors.enricher.flink.processor.math.staticmathop": {
+        "title": "Static Math",
+        "sidebar_label": "Static Math"
+      },
+      "version-0.62.0/org.streampipes.processors.enricher.flink.processor.trigonometry/version-0.62.0-org.streampipes.processors.enricher.flink.processor.trigonometry": {
+        "title": "Trigonometry Functions",
+        "sidebar_label": "Trigonometry Functions"
+      },
+      "version-0.62.0/org.streampipes.processors.enricher.flink.processor.urldereferencing/version-0.62.0-org.streampipes.processors.enricher.flink.processor.urldereferencing": {
+        "title": "URL Dereferencing",
+        "sidebar_label": "URL Dereferencing"
+      },
+      "version-0.62.0/org.streampipes.processors.enricher.flink.timestamp/version-0.62.0-org.streampipes.processors.enricher.flink.timestamp": {
+        "title": "Timestamp Enricher",
+        "sidebar_label": "Timestamp Enricher"
+      },
+      "version-0.62.0/org.streampipes.processors.filters.jvm.compose/version-0.62.0-org.streampipes.processors.filters.jvm.compose": {
+        "title": "Compose",
+        "sidebar_label": "Compose"
+      },
+      "version-0.62.0/org.streampipes.processors.filters.jvm.numericalfilter/version-0.62.0-org.streampipes.processors.filters.jvm.numericalfilter": {
+        "title": "Numerical Filter",
+        "sidebar_label": "Numerical Filter"
+      },
+      "version-0.62.0/org.streampipes.processors.filters.jvm.project/version-0.62.0-org.streampipes.processors.filters.jvm.project": {
+        "title": "Projection",
+        "sidebar_label": "Projection"
+      },
+      "version-0.62.0/org.streampipes.processors.filters.jvm.textfilter/version-0.62.0-org.streampipes.processors.filters.jvm.textfilter": {
+        "title": "Text Filter",
+        "sidebar_label": "Text Filter"
+      },
+      "version-0.62.0/org.streampipes.processors.pattern-detection.flink.peak-detection/version-0.62.0-org.streampipes.processors.pattern-detection.flink.peak-detection": {
+        "title": "Peak Detection",
+        "sidebar_label": "Peak Detection"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.frequency/version-0.62.0-org.streampipes.processors.siddhi.frequency": {
+        "title": "Frequency Calculation",
+        "sidebar_label": "Frequency Calculation"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.frequencychange/version-0.62.0-org.streampipes.processors.siddhi.frequencychange": {
+        "title": "Frequency Change",
+        "sidebar_label": "Frequency Change"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.increase/version-0.62.0-org.streampipes.processors.siddhi.increase": {
+        "title": "Trend",
+        "sidebar_label": "Trend"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.numericalfilter/version-0.62.0-org.streampipes.processors.siddhi.numericalfilter": {
+        "title": "Numerical Filter (Siddhi)",
+        "sidebar_label": "Numerical Filter (Siddhi)"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.sequence/version-0.62.0-org.streampipes.processors.siddhi.sequence": {
+        "title": "Sequence Detection",
+        "sidebar_label": "Sequence Detection"
+      },
+      "version-0.62.0/org.streampipes.processors.siddhi.stop/version-0.62.0-org.streampipes.processors.siddhi.stop": {
+        "title": "Stream Stop Detection",
+        "sidebar_label": "Stream Stop Detection"
+      },
+      "version-0.62.0/org.streampipes.processors.textmining.flink.languagedetection/version-0.62.0-org.streampipes.processors.textmining.flink.languagedetection": {
+        "title": "Language Detection",
+        "sidebar_label": "Language Detection"
+      },
+      "version-0.62.0/org.streampipes.processors.textmining.flink.wordcount/version-0.62.0-org.streampipes.processors.textmining.flink.wordcount": {
+        "title": "Word Count",
+        "sidebar_label": "Word Count"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.field-converter/version-0.62.0-org.streampipes.processors.transformation.flink.field-converter": {
+        "title": "Field Converter",
+        "sidebar_label": "Field Converter"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.field-mapper/version-0.62.0-org.streampipes.processors.transformation.flink.field-mapper": {
+        "title": "Field Mapper",
+        "sidebar_label": "Field Mapper"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.field-renamer/version-0.62.0-org.streampipes.processors.transformation.flink.field-renamer": {
+        "title": "Field Renamer",
+        "sidebar_label": "Field Renamer"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.fieldhasher/version-0.62.0-org.streampipes.processors.transformation.flink.fieldhasher": {
+        "title": "Field Hasher",
+        "sidebar_label": "Field Hasher"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.measurement-unit-converter/version-0.62.0-org.streampipes.processors.transformation.flink.measurement-unit-converter": {
+        "title": "Measurement Unit Converter",
+        "sidebar_label": "Measurement Unit Converter"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.flink.processor.boilerplate/version-0.62.0-org.streampipes.processors.transformation.flink.processor.boilerplate": {
+        "title": "Boilerplate Removal",
+        "sidebar_label": "Boilerplate Removal"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.jvm.changed-value/version-0.62.0-org.streampipes.processors.transformation.jvm.changed-value": {
+        "title": "Value Changed",
+        "sidebar_label": "Value Changed"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.jvm.count-array/version-0.62.0-org.streampipes.processors.transformation.jvm.count-array": {
+        "title": "Count Array",
+        "sidebar_label": "Count Array"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.jvm.duration-value/version-0.62.0-org.streampipes.processors.transformation.jvm.duration-value": {
+        "title": "Calculate Duration",
+        "sidebar_label": "Calculate Duration"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.jvm.processor.timestampextractor/version-0.62.0-org.streampipes.processors.transformation.jvm.processor.timestampextractor": {
+        "title": "Timestamp Extractor",
+        "sidebar_label": "Timestamp Extractor"
+      },
+      "version-0.62.0/org.streampipes.processors.transformation.jvm.split-array/version-0.62.0-org.streampipes.processors.transformation.jvm.split-array": {
+        "title": "Split Array",
+        "sidebar_label": "Split Array"
+      },
+      "version-0.62.0/org.streampipes.sinks.brokers.jvm.jms/version-0.62.0-org.streampipes.sinks.brokers.jvm.jms": {
+        "title": "JMS Publisher",
+        "sidebar_label": "JMS Publisher"
+      },
+      "version-0.62.0/org.streampipes.sinks.brokers.jvm.kafka/version-0.62.0-org.streampipes.sinks.brokers.jvm.kafka": {
+        "title": "Kafka Publisher",
+        "sidebar_label": "Kafka Publisher"
+      },
+      "version-0.62.0/org.streampipes.sinks.brokers.jvm.rabbitmq/version-0.62.0-org.streampipes.sinks.brokers.jvm.rabbitmq": {
+        "title": "RabbitMQ Publisher",
+        "sidebar_label": "RabbitMQ Publisher"
+      },
+      "version-0.62.0/org.streampipes.sinks.brokers.jvm.rest/version-0.62.0-org.streampipes.sinks.brokers.jvm.rest": {
+        "title": "REST Publisher",
+        "sidebar_label": "REST Publisher"
+      },
+      "version-0.62.0/org.streampipes.sinks.databases.flink.elasticsearch/version-0.62.0-org.streampipes.sinks.databases.flink.elasticsearch": {
+        "title": "Elasticsearch",
+        "sidebar_label": "Elasticsearch"
+      },
+      "version-0.62.0/org.streampipes.sinks.databases.jvm.couchdb/version-0.62.0-org.streampipes.sinks.databases.jvm.couchdb": {
+        "title": "CouchDB",
+        "sidebar_label": "CouchDB"
+      },
+      "version-0.62.0/org.streampipes.sinks.databases.jvm.influxdb/version-0.62.0-org.streampipes.sinks.databases.jvm.influxdb": {
+        "title": "InfluxDB",
+        "sidebar_label": "InfluxDB"
+      },
+      "version-0.62.0/org.streampipes.sinks.databases.jvm.postgresql/version-0.62.0-org.streampipes.sinks.databases.jvm.postgresql": {
+        "title": "PostgreSQL",
+        "sidebar_label": "PostgreSQL"
+      },
+      "version-0.62.0/org.streampipes.sinks.internal.jvm.dashboard/version-0.62.0-org.streampipes.sinks.internal.jvm.dashboard": {
+        "title": "Dashboard Sink",
+        "sidebar_label": "Dashboard Sink"
+      },
+      "version-0.62.0/org.streampipes.sinks.internal.jvm.notification/version-0.62.0-org.streampipes.sinks.internal.jvm.notification": {
+        "title": "Notification",
+        "sidebar_label": "Notification"
+      },
+      "version-0.62.0/org.streampipes.sinks.notifications.jvm.email/version-0.62.0-org.streampipes.sinks.notifications.jvm.email": {
+        "title": "Email Notification",
+        "sidebar_label": "Email Notification"
+      },
+      "version-0.62.0/org.streampipes.sinks.notifications.jvm.onesignal/version-0.62.0-org.streampipes.sinks.notifications.jvm.onesignal": {
+        "title": "OneSignal",
+        "sidebar_label": "OneSignal"
+      },
+      "version-0.62.0/org.streampipes.sinks.notifications.jvm.slack/version-0.62.0-org.streampipes.sinks.notifications.jvm.slack": {
+        "title": "Slack Notification",
+        "sidebar_label": "Slack Notification"
+      },
+      "version-0.62.0/version-0.62.0-pipeline-elements": {
+        "title": "Overview",
+        "sidebar_label": "Overview"
+      },
+      "version-0.62.0/version-0.62.0-user-guide-tour": {
+        "title": "Tour",
+        "sidebar_label": "Tour"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-archetype": {
+        "title": "Start Developing",
+        "sidebar_label": "Start Developing"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-configuration": {
+        "title": "Configuration",
+        "sidebar_label": "Configuration"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-development-environment": {
+        "title": "Development Environment",
+        "sidebar_label": "Development Environment"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-tutorial-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-tutorial-sinks": {
+        "title": "Tutorial: Data Sinks",
+        "sidebar_label": "Tutorial: Data Sinks"
+      },
+      "version-0.63.0/version-0.63.0-dev-guide-tutorial-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      },
+      "version-0.63.0/version-0.63.0-user-guide-first-steps": {
+        "title": "First steps",
+        "sidebar_label": "First steps"
+      },
+      "version-0.63.0/version-0.63.0-user-guide-installation": {
+        "title": "Installation",
+        "sidebar_label": "Installation"
+      },
+      "version-0.64.0/version-0.64.0-dev-guide-archetype": {
+        "title": "Start Developing",
+        "sidebar_label": "Start Developing"
+      },
+      "version-0.64.0/version-0.64.0-dev-guide-development-environment": {
+        "title": "Development Environment",
+        "sidebar_label": "Development Environment"
+      },
+      "version-0.64.0/version-0.64.0-dev-guide-tutorial-processors": {
+        "title": "Tutorial: Data Processors",
+        "sidebar_label": "Tutorial: Data Processors"
+      },
+      "version-0.64.0/version-0.64.0-dev-guide-tutorial-sinks": {
+        "title": "Tutorial: Data Sinks",
+        "sidebar_label": "Tutorial: Data Sinks"
+      },
+      "version-0.64.0/version-0.64.0-dev-guide-tutorial-sources": {
+        "title": "Tutorial: Data Sources",
+        "sidebar_label": "Tutorial: Data Sources"
+      }
+    },
+    "links": {
+      "User Guide": "User Guide",
+      "Developer Guide": "Developer Guide",
+      "Pipeline Elements": "Pipeline Elements",
+      "FAQ": "FAQ",
+      "Blog": "Blog",
+      "Community": "Community"
+    },
+    "categories": {
+      "User Guide": "User Guide",
+      "Pipeline Elements": "Pipeline Elements",
+      "Basics": "Basics",
+      "Tutorials": "Tutorials",
+      "SDK Reference": "SDK Reference",
+      "Configuration": "Configuration",
+      "FAQ": "FAQ",
+      "Developer Guide": "Developer Guide"
+    }
+  },
+  "pages-strings": {
+    "Help Translate|recruit community translators for your project": "Help Translate",
+    "Edit this Doc|recruitment message asking to edit the doc source": "Edit",
+    "Translate this Doc|recruitment message asking to translate the docs": "Translate"
+  }
+}
diff --git a/documentation/website/package-lock.json b/documentation/website/package-lock.json
new file mode 100644
index 0000000..0c20d15
--- /dev/null
+++ b/documentation/website/package-lock.json
@@ -0,0 +1,9855 @@
+{
+  "requires": true,
+  "lockfileVersion": 1,
+  "dependencies": {
+    "@babel/code-frame": {
+      "version": "7.5.5",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.5.5.tgz",
+      "integrity": "sha512-27d4lZoomVyo51VegxI20xZPuSHusqbQag/ztrBC7wegWoQ1nLREPVSKSW8byhTlzTKyNE4ifaTA6lCp7JjpFw==",
+      "dev": true,
+      "requires": {
+        "@babel/highlight": "7.5.0"
+      }
+    },
+    "@babel/core": {
+      "version": "7.7.2",
+      "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.7.2.tgz",
+      "integrity": "sha512-eeD7VEZKfhK1KUXGiyPFettgF3m513f8FoBSWiQ1xTvl1RAopLs42Wp9+Ze911I6H0N9lNqJMDgoZT7gHsipeQ==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "7.5.5",
+        "@babel/generator": "7.7.2",
+        "@babel/helpers": "7.7.0",
+        "@babel/parser": "7.7.3",
+        "@babel/template": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2",
+        "convert-source-map": "1.7.0",
+        "debug": "4.1.1",
+        "json5": "2.1.1",
+        "lodash": "4.17.15",
+        "resolve": "1.12.0",
+        "semver": "5.7.1",
+        "source-map": "0.5.7"
+      }
+    },
+    "@babel/generator": {
+      "version": "7.7.2",
+      "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.7.2.tgz",
+      "integrity": "sha512-WthSArvAjYLz4TcbKOi88me+KmDJdKSlfwwN8CnUYn9jBkzhq0ZEPuBfkAWIvjJ3AdEV1Cf/+eSQTnp3IDJKlQ==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2",
+        "jsesc": "2.5.2",
+        "lodash": "4.17.15",
+        "source-map": "0.5.7"
+      }
+    },
+    "@babel/helper-annotate-as-pure": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.7.0.tgz",
+      "integrity": "sha512-k50CQxMlYTYo+GGyUGFwpxKVtxVJi9yh61sXZji3zYHccK9RYliZGSTOgci85T+r+0VFN2nWbGM04PIqwfrpMg==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-builder-binary-assignment-operator-visitor": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.7.0.tgz",
+      "integrity": "sha512-Cd8r8zs4RKDwMG/92lpZcnn5WPQ3LAMQbCw42oqUh4s7vsSN5ANUZjMel0OOnxDLq57hoDDbai+ryygYfCTOsw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-explode-assignable-expression": "7.7.0",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-builder-react-jsx": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-builder-react-jsx/-/helper-builder-react-jsx-7.7.0.tgz",
+      "integrity": "sha512-LSln3cexwInTMYYoFeVLKnYPPMfWNJ8PubTBs3hkh7wCu9iBaqq1OOyW+xGmEdLxT1nhsl+9SJ+h2oUDYz0l2A==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2",
+        "esutils": "2.0.3"
+      }
+    },
+    "@babel/helper-call-delegate": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-call-delegate/-/helper-call-delegate-7.7.0.tgz",
+      "integrity": "sha512-Su0Mdq7uSSWGZayGMMQ+z6lnL00mMCnGAbO/R0ZO9odIdB/WNU/VfQKqMQU0fdIsxQYbRjDM4BixIa93SQIpvw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-hoist-variables": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-create-class-features-plugin": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.7.0.tgz",
+      "integrity": "sha512-MZiB5qvTWoyiFOgootmRSDV1udjIqJW/8lmxgzKq6oDqxdmHUjeP2ZUOmgHdYjmUVNABqRrHjYAYRvj8Eox/UA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/helper-member-expression-to-functions": "7.7.0",
+        "@babel/helper-optimise-call-expression": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-replace-supers": "7.7.0",
+        "@babel/helper-split-export-declaration": "7.7.0"
+      }
+    },
+    "@babel/helper-create-regexp-features-plugin": {
+      "version": "7.7.2",
+      "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.7.2.tgz",
+      "integrity": "sha512-pAil/ZixjTlrzNpjx+l/C/wJk002Wo7XbbZ8oujH/AoJ3Juv0iN/UTcPUHXKMFLqsfS0Hy6Aow8M31brUYBlQQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-regex": "7.5.5",
+        "regexpu-core": "4.6.0"
+      }
+    },
+    "@babel/helper-define-map": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.7.0.tgz",
+      "integrity": "sha512-kPKWPb0dMpZi+ov1hJiwse9dWweZsz3V9rP4KdytnX1E7z3cTNmFGglwklzFPuqIcHLIY3bgKSs4vkwXXdflQA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/types": "7.7.2",
+        "lodash": "4.17.15"
+      }
+    },
+    "@babel/helper-explode-assignable-expression": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.7.0.tgz",
+      "integrity": "sha512-CDs26w2shdD1urNUAji2RJXyBFCaR+iBEGnFz3l7maizMkQe3saVw9WtjG1tz8CwbjvlFnaSLVhgnu1SWaherg==",
+      "dev": true,
+      "requires": {
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-function-name": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.7.0.tgz",
+      "integrity": "sha512-tDsJgMUAP00Ugv8O2aGEua5I2apkaQO7lBGUq1ocwN3G23JE5Dcq0uh3GvFTChPa4b40AWiAsLvCZOA2rdnQ7Q==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-get-function-arity": "7.7.0",
+        "@babel/template": "7.7.0",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-get-function-arity": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.7.0.tgz",
+      "integrity": "sha512-tLdojOTz4vWcEnHWHCuPN5P85JLZWbm5Fx5ZsMEMPhF3Uoe3O7awrbM2nQ04bDOUToH/2tH/ezKEOR8zEYzqyw==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-hoist-variables": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.7.0.tgz",
+      "integrity": "sha512-LUe/92NqsDAkJjjCEWkNe+/PcpnisvnqdlRe19FahVapa4jndeuJ+FBiTX1rcAKWKcJGE+C3Q3tuEuxkSmCEiQ==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-member-expression-to-functions": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.7.0.tgz",
+      "integrity": "sha512-QaCZLO2RtBcmvO/ekOLp8p7R5X2JriKRizeDpm5ChATAFWrrYDcDxPuCIBXKyBjY+i1vYSdcUTMIb8psfxHDPA==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-module-imports": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.7.0.tgz",
+      "integrity": "sha512-Dv3hLKIC1jyfTkClvyEkYP2OlkzNvWs5+Q8WgPbxM5LMeorons7iPP91JM+DU7tRbhqA1ZeooPaMFvQrn23RHw==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-module-transforms": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.7.0.tgz",
+      "integrity": "sha512-rXEefBuheUYQyX4WjV19tuknrJFwyKw0HgzRwbkyTbB+Dshlq7eqkWbyjzToLrMZk/5wKVKdWFluiAsVkHXvuQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-imports": "7.7.0",
+        "@babel/helper-simple-access": "7.7.0",
+        "@babel/helper-split-export-declaration": "7.7.0",
+        "@babel/template": "7.7.0",
+        "@babel/types": "7.7.2",
+        "lodash": "4.17.15"
+      }
+    },
+    "@babel/helper-optimise-call-expression": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.7.0.tgz",
+      "integrity": "sha512-48TeqmbazjNU/65niiiJIJRc5JozB8acui1OS7bSd6PgxfuovWsvjfWSzlgx+gPFdVveNzUdpdIg5l56Pl5jqg==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-plugin-utils": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz",
+      "integrity": "sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==",
+      "dev": true
+    },
+    "@babel/helper-regex": {
+      "version": "7.5.5",
+      "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.5.5.tgz",
+      "integrity": "sha512-CkCYQLkfkiugbRDO8eZn6lRuR8kzZoGXCg3149iTk5se7g6qykSpy3+hELSwquhu+TgHn8nkLiBwHvNX8Hofcw==",
+      "dev": true,
+      "requires": {
+        "lodash": "4.17.15"
+      }
+    },
+    "@babel/helper-remap-async-to-generator": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.7.0.tgz",
+      "integrity": "sha512-pHx7RN8X0UNHPB/fnuDnRXVZ316ZigkO8y8D835JlZ2SSdFKb6yH9MIYRU4fy/KPe5sPHDFOPvf8QLdbAGGiyw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-annotate-as-pure": "7.7.0",
+        "@babel/helper-wrap-function": "7.7.0",
+        "@babel/template": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-replace-supers": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.7.0.tgz",
+      "integrity": "sha512-5ALYEul5V8xNdxEeWvRsBzLMxQksT7MaStpxjJf9KsnLxpAKBtfw5NeMKZJSYDa0lKdOcy0g+JT/f5mPSulUgg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-member-expression-to-functions": "7.7.0",
+        "@babel/helper-optimise-call-expression": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-simple-access": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.7.0.tgz",
+      "integrity": "sha512-AJ7IZD7Eem3zZRuj5JtzFAptBw7pMlS3y8Qv09vaBWoFsle0d1kAn5Wq6Q9MyBXITPOKnxwkZKoAm4bopmv26g==",
+      "dev": true,
+      "requires": {
+        "@babel/template": "7.7.0",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-split-export-declaration": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.7.0.tgz",
+      "integrity": "sha512-HgYSI8rH08neWlAH3CcdkFg9qX9YsZysZI5GD8LjhQib/mM0jGOZOVkoUiiV2Hu978fRtjtsGsW6w0pKHUWtqA==",
+      "dev": true,
+      "requires": {
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helper-wrap-function": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.7.0.tgz",
+      "integrity": "sha512-sd4QjeMgQqzshSjecZjOp8uKfUtnpmCyQhKQrVJBBgeHAB/0FPi33h3AbVlVp07qQtMD4QgYSzaMI7VwncNK/w==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/template": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/helpers": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.7.0.tgz",
+      "integrity": "sha512-VnNwL4YOhbejHb7x/b5F39Zdg5vIQpUUNzJwx0ww1EcVRt41bbGRZWhAURrfY32T5zTT3qwNOQFWpn+P0i0a2g==",
+      "dev": true,
+      "requires": {
+        "@babel/template": "7.7.0",
+        "@babel/traverse": "7.7.2",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/highlight": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz",
+      "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==",
+      "dev": true,
+      "requires": {
+        "chalk": "2.4.2",
+        "esutils": "2.0.3",
+        "js-tokens": "4.0.0"
+      }
+    },
+    "@babel/parser": {
+      "version": "7.7.3",
+      "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.7.3.tgz",
+      "integrity": "sha512-bqv+iCo9i+uLVbI0ILzKkvMorqxouI+GbV13ivcARXn9NNEabi2IEz912IgNpT/60BNXac5dgcfjb94NjsF33A==",
+      "dev": true
+    },
+    "@babel/plugin-proposal-async-generator-functions": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.7.0.tgz",
+      "integrity": "sha512-ot/EZVvf3mXtZq0Pd0+tSOfGWMizqmOohXmNZg6LNFjHOV+wOPv7BvVYh8oPR8LhpIP3ye8nNooKL50YRWxpYA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-remap-async-to-generator": "7.7.0",
+        "@babel/plugin-syntax-async-generators": "7.2.0"
+      }
+    },
+    "@babel/plugin-proposal-class-properties": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.7.0.tgz",
+      "integrity": "sha512-tufDcFA1Vj+eWvwHN+jvMN6QsV5o+vUlytNKrbMiCeDL0F2j92RURzUsUMWE5EJkLyWxjdUslCsMQa9FWth16A==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-create-class-features-plugin": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-proposal-dynamic-import": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.7.0.tgz",
+      "integrity": "sha512-7poL3Xi+QFPC7sGAzEIbXUyYzGJwbc2+gSD0AkiC5k52kH2cqHdqxm5hNFfLW3cRSTcx9bN0Fl7/6zWcLLnKAQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-dynamic-import": "7.2.0"
+      }
+    },
+    "@babel/plugin-proposal-json-strings": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz",
+      "integrity": "sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-json-strings": "7.2.0"
+      }
+    },
+    "@babel/plugin-proposal-object-rest-spread": {
+      "version": "7.6.2",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.6.2.tgz",
+      "integrity": "sha512-LDBXlmADCsMZV1Y9OQwMc0MyGZ8Ta/zlD9N67BfQT8uYwkRswiu2hU6nJKrjrt/58aH/vqfQlR/9yId/7A2gWw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-object-rest-spread": "7.2.0"
+      }
+    },
+    "@babel/plugin-proposal-optional-catch-binding": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz",
+      "integrity": "sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-optional-catch-binding": "7.2.0"
+      }
+    },
+    "@babel/plugin-proposal-unicode-property-regex": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.7.0.tgz",
+      "integrity": "sha512-mk34H+hp7kRBWJOOAR0ZMGCydgKMD4iN9TpDRp3IIcbunltxEY89XSimc6WbtSLCDrwcdy/EEw7h5CFCzxTchw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-create-regexp-features-plugin": "7.7.2",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-async-generators": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz",
+      "integrity": "sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-dynamic-import": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz",
+      "integrity": "sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-json-strings": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz",
+      "integrity": "sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-jsx": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.2.0.tgz",
+      "integrity": "sha512-VyN4QANJkRW6lDBmENzRszvZf3/4AXaj9YR7GwrWeeN9tEBPuXbmDYVU9bYBN0D70zCWVwUy0HWq2553VCb6Hw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-object-rest-spread": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz",
+      "integrity": "sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-optional-catch-binding": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz",
+      "integrity": "sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-syntax-top-level-await": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.7.0.tgz",
+      "integrity": "sha512-hi8FUNiFIY1fnUI2n1ViB1DR0R4QeK4iHcTlW6aJkrPoTdb8Rf1EMQ6GT3f67DDkYyWgew9DFoOZ6gOoEsdzTA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-arrow-functions": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz",
+      "integrity": "sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-async-to-generator": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.7.0.tgz",
+      "integrity": "sha512-vLI2EFLVvRBL3d8roAMqtVY0Bm9C1QzLkdS57hiKrjUBSqsQYrBsMCeOg/0KK7B0eK9V71J5mWcha9yyoI2tZw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-imports": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-remap-async-to-generator": "7.7.0"
+      }
+    },
+    "@babel/plugin-transform-block-scoped-functions": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz",
+      "integrity": "sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-block-scoping": {
+      "version": "7.6.3",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.3.tgz",
+      "integrity": "sha512-7hvrg75dubcO3ZI2rjYTzUrEuh1E9IyDEhhB6qfcooxhDA33xx2MasuLVgdxzcP6R/lipAC6n9ub9maNW6RKdw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "lodash": "4.17.15"
+      }
+    },
+    "@babel/plugin-transform-classes": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.7.0.tgz",
+      "integrity": "sha512-/b3cKIZwGeUesZheU9jNYcwrEA7f/Bo4IdPmvp7oHgvks2majB5BoT5byAql44fiNQYOPzhk2w8DbgfuafkMoA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-annotate-as-pure": "7.7.0",
+        "@babel/helper-define-map": "7.7.0",
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/helper-optimise-call-expression": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-replace-supers": "7.7.0",
+        "@babel/helper-split-export-declaration": "7.7.0",
+        "globals": "11.12.0"
+      }
+    },
+    "@babel/plugin-transform-computed-properties": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz",
+      "integrity": "sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-destructuring": {
+      "version": "7.6.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.6.0.tgz",
+      "integrity": "sha512-2bGIS5P1v4+sWTCnKNDZDxbGvEqi0ijeqM/YqHtVGrvG2y0ySgnEEhXErvE9dA0bnIzY9bIzdFK0jFA46ASIIQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-dotall-regex": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.7.0.tgz",
+      "integrity": "sha512-3QQlF7hSBnSuM1hQ0pS3pmAbWLax/uGNCbPBND9y+oJ4Y776jsyujG2k0Sn2Aj2a0QwVOiOFL5QVPA7spjvzSA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-create-regexp-features-plugin": "7.7.2",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-duplicate-keys": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.5.0.tgz",
+      "integrity": "sha512-igcziksHizyQPlX9gfSjHkE2wmoCH3evvD2qR5w29/Dk0SMKE/eOI7f1HhBdNhR/zxJDqrgpoDTq5YSLH/XMsQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-exponentiation-operator": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz",
+      "integrity": "sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-builder-binary-assignment-operator-visitor": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-for-of": {
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz",
+      "integrity": "sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-function-name": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.7.0.tgz",
+      "integrity": "sha512-P5HKu0d9+CzZxP5jcrWdpe7ZlFDe24bmqP6a6X8BHEBl/eizAsY8K6LX8LASZL0Jxdjm5eEfzp+FIrxCm/p8bA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-literals": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz",
+      "integrity": "sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-member-expression-literals": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.2.0.tgz",
+      "integrity": "sha512-HiU3zKkSU6scTidmnFJ0bMX8hz5ixC93b4MHMiYebmk2lUVNGOboPsqQvx5LzooihijUoLR/v7Nc1rbBtnc7FA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-modules-amd": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.5.0.tgz",
+      "integrity": "sha512-n20UsQMKnWrltocZZm24cRURxQnWIvsABPJlw/fvoy9c6AgHZzoelAIzajDHAQrDpuKFFPPcFGd7ChsYuIUMpg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-transforms": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "babel-plugin-dynamic-import-node": "2.3.0"
+      }
+    },
+    "@babel/plugin-transform-modules-commonjs": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.7.0.tgz",
+      "integrity": "sha512-KEMyWNNWnjOom8vR/1+d+Ocz/mILZG/eyHHO06OuBQ2aNhxT62fr4y6fGOplRx+CxCSp3IFwesL8WdINfY/3kg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-transforms": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-simple-access": "7.7.0",
+        "babel-plugin-dynamic-import-node": "2.3.0"
+      }
+    },
+    "@babel/plugin-transform-modules-systemjs": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.7.0.tgz",
+      "integrity": "sha512-ZAuFgYjJzDNv77AjXRqzQGlQl4HdUM6j296ee4fwKVZfhDR9LAGxfvXjBkb06gNETPnN0sLqRm9Gxg4wZH6dXg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-hoist-variables": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "babel-plugin-dynamic-import-node": "2.3.0"
+      }
+    },
+    "@babel/plugin-transform-modules-umd": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.7.0.tgz",
+      "integrity": "sha512-u7eBA03zmUswQ9LQ7Qw0/ieC1pcAkbp5OQatbWUzY1PaBccvuJXUkYzoN1g7cqp7dbTu6Dp9bXyalBvD04AANA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-transforms": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-named-capturing-groups-regex": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.7.0.tgz",
+      "integrity": "sha512-+SicSJoKouPctL+j1pqktRVCgy+xAch1hWWTMy13j0IflnyNjaoskj+DwRQFimHbLqO3sq2oN2CXMvXq3Bgapg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-create-regexp-features-plugin": "7.7.2"
+      }
+    },
+    "@babel/plugin-transform-new-target": {
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz",
+      "integrity": "sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-object-super": {
+      "version": "7.5.5",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.5.5.tgz",
+      "integrity": "sha512-un1zJQAhSosGFBduPgN/YFNvWVpRuHKU7IHBglLoLZsGmruJPOo6pbInneflUdmq7YvSVqhpPs5zdBvLnteltQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-replace-supers": "7.7.0"
+      }
+    },
+    "@babel/plugin-transform-parameters": {
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz",
+      "integrity": "sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-call-delegate": "7.7.0",
+        "@babel/helper-get-function-arity": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-property-literals": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.2.0.tgz",
+      "integrity": "sha512-9q7Dbk4RhgcLp8ebduOpCbtjh7C0itoLYHXd9ueASKAG/is5PQtMR5VJGka9NKqGhYEGn5ITahd4h9QeBMylWQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-react-display-name": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.2.0.tgz",
+      "integrity": "sha512-Htf/tPa5haZvRMiNSQSFifK12gtr/8vwfr+A9y69uF0QcU77AVu4K7MiHEkTxF7lQoHOL0F9ErqgfNEAKgXj7A==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-react-jsx": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.7.0.tgz",
+      "integrity": "sha512-mXhBtyVB1Ujfy+0L6934jeJcSXj/VCg6whZzEcgiiZHNS0PGC7vUCsZDQCxxztkpIdF+dY1fUMcjAgEOC3ZOMQ==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-builder-react-jsx": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-jsx": "7.2.0"
+      }
+    },
+    "@babel/plugin-transform-react-jsx-self": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.2.0.tgz",
+      "integrity": "sha512-v6S5L/myicZEy+jr6ielB0OR8h+EH/1QFx/YJ7c7Ua+7lqsjj/vW6fD5FR9hB/6y7mGbfT4vAURn3xqBxsUcdg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-jsx": "7.2.0"
+      }
+    },
+    "@babel/plugin-transform-react-jsx-source": {
+      "version": "7.5.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.5.0.tgz",
+      "integrity": "sha512-58Q+Jsy4IDCZx7kqEZuSDdam/1oW8OdDX8f+Loo6xyxdfg1yF0GE2XNJQSTZCaMol93+FBzpWiPEwtbMloAcPg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-syntax-jsx": "7.2.0"
+      }
+    },
+    "@babel/plugin-transform-regenerator": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.7.0.tgz",
+      "integrity": "sha512-AXmvnC+0wuj/cFkkS/HFHIojxH3ffSXE+ttulrqWjZZRaUOonfJc60e1wSNT4rV8tIunvu/R3wCp71/tLAa9xg==",
+      "dev": true,
+      "requires": {
+        "regenerator-transform": "0.14.1"
+      }
+    },
+    "@babel/plugin-transform-reserved-words": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.2.0.tgz",
+      "integrity": "sha512-fz43fqW8E1tAB3DKF19/vxbpib1fuyCwSPE418ge5ZxILnBhWyhtPgz8eh1RCGGJlwvksHkyxMxh0eenFi+kFw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-shorthand-properties": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz",
+      "integrity": "sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-spread": {
+      "version": "7.6.2",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.6.2.tgz",
+      "integrity": "sha512-DpSvPFryKdK1x+EDJYCy28nmAaIMdxmhot62jAXF/o99iA33Zj2Lmcp3vDmz+MUh0LNYVPvfj5iC3feb3/+PFg==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-sticky-regex": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz",
+      "integrity": "sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/helper-regex": "7.5.5"
+      }
+    },
+    "@babel/plugin-transform-template-literals": {
+      "version": "7.4.4",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz",
+      "integrity": "sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-annotate-as-pure": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-typeof-symbol": {
+      "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz",
+      "integrity": "sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/plugin-transform-unicode-regex": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.7.0.tgz",
+      "integrity": "sha512-RrThb0gdrNwFAqEAAx9OWgtx6ICK69x7i9tCnMdVrxQwSDp/Abu9DXFU5Hh16VP33Rmxh04+NGW28NsIkFvFKA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-create-regexp-features-plugin": "7.7.2",
+        "@babel/helper-plugin-utils": "7.0.0"
+      }
+    },
+    "@babel/polyfill": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/polyfill/-/polyfill-7.7.0.tgz",
+      "integrity": "sha512-/TS23MVvo34dFmf8mwCisCbWGrfhbiWZSwBo6HkADTBhUa2Q/jWltyY/tpofz/b6/RIhqaqQcquptCirqIhOaQ==",
+      "dev": true,
+      "requires": {
+        "core-js": "2.6.10",
+        "regenerator-runtime": "0.13.3"
+      }
+    },
+    "@babel/preset-env": {
+      "version": "7.7.1",
+      "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.7.1.tgz",
+      "integrity": "sha512-/93SWhi3PxcVTDpSqC+Dp4YxUu3qZ4m7I76k0w73wYfn7bGVuRIO4QUz95aJksbS+AD1/mT1Ie7rbkT0wSplaA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-module-imports": "7.7.0",
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-proposal-async-generator-functions": "7.7.0",
+        "@babel/plugin-proposal-dynamic-import": "7.7.0",
+        "@babel/plugin-proposal-json-strings": "7.2.0",
+        "@babel/plugin-proposal-object-rest-spread": "7.6.2",
+        "@babel/plugin-proposal-optional-catch-binding": "7.2.0",
+        "@babel/plugin-proposal-unicode-property-regex": "7.7.0",
+        "@babel/plugin-syntax-async-generators": "7.2.0",
+        "@babel/plugin-syntax-dynamic-import": "7.2.0",
+        "@babel/plugin-syntax-json-strings": "7.2.0",
+        "@babel/plugin-syntax-object-rest-spread": "7.2.0",
+        "@babel/plugin-syntax-optional-catch-binding": "7.2.0",
+        "@babel/plugin-syntax-top-level-await": "7.7.0",
+        "@babel/plugin-transform-arrow-functions": "7.2.0",
+        "@babel/plugin-transform-async-to-generator": "7.7.0",
+        "@babel/plugin-transform-block-scoped-functions": "7.2.0",
+        "@babel/plugin-transform-block-scoping": "7.6.3",
+        "@babel/plugin-transform-classes": "7.7.0",
+        "@babel/plugin-transform-computed-properties": "7.2.0",
+        "@babel/plugin-transform-destructuring": "7.6.0",
+        "@babel/plugin-transform-dotall-regex": "7.7.0",
+        "@babel/plugin-transform-duplicate-keys": "7.5.0",
+        "@babel/plugin-transform-exponentiation-operator": "7.2.0",
+        "@babel/plugin-transform-for-of": "7.4.4",
+        "@babel/plugin-transform-function-name": "7.7.0",
+        "@babel/plugin-transform-literals": "7.2.0",
+        "@babel/plugin-transform-member-expression-literals": "7.2.0",
+        "@babel/plugin-transform-modules-amd": "7.5.0",
+        "@babel/plugin-transform-modules-commonjs": "7.7.0",
+        "@babel/plugin-transform-modules-systemjs": "7.7.0",
+        "@babel/plugin-transform-modules-umd": "7.7.0",
+        "@babel/plugin-transform-named-capturing-groups-regex": "7.7.0",
+        "@babel/plugin-transform-new-target": "7.4.4",
+        "@babel/plugin-transform-object-super": "7.5.5",
+        "@babel/plugin-transform-parameters": "7.4.4",
+        "@babel/plugin-transform-property-literals": "7.2.0",
+        "@babel/plugin-transform-regenerator": "7.7.0",
+        "@babel/plugin-transform-reserved-words": "7.2.0",
+        "@babel/plugin-transform-shorthand-properties": "7.2.0",
+        "@babel/plugin-transform-spread": "7.6.2",
+        "@babel/plugin-transform-sticky-regex": "7.2.0",
+        "@babel/plugin-transform-template-literals": "7.4.4",
+        "@babel/plugin-transform-typeof-symbol": "7.2.0",
+        "@babel/plugin-transform-unicode-regex": "7.7.0",
+        "@babel/types": "7.7.2",
+        "browserslist": "4.7.2",
+        "core-js-compat": "3.4.1",
+        "invariant": "2.2.4",
+        "js-levenshtein": "1.1.6",
+        "semver": "5.7.1"
+      }
+    },
+    "@babel/preset-react": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.7.0.tgz",
+      "integrity": "sha512-IXXgSUYBPHUGhUkH+89TR6faMcBtuMW0h5OHbMuVbL3/5wK2g6a2M2BBpkLa+Kw0sAHiZ9dNVgqJMDP/O4GRBA==",
+      "dev": true,
+      "requires": {
+        "@babel/helper-plugin-utils": "7.0.0",
+        "@babel/plugin-transform-react-display-name": "7.2.0",
+        "@babel/plugin-transform-react-jsx": "7.7.0",
+        "@babel/plugin-transform-react-jsx-self": "7.2.0",
+        "@babel/plugin-transform-react-jsx-source": "7.5.0"
+      }
+    },
+    "@babel/register": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/register/-/register-7.7.0.tgz",
+      "integrity": "sha512-HV3GJzTvSoyOMWGYn2TAh6uL6g+gqKTgEZ99Q3+X9UURT1VPT/WcU46R61XftIc5rXytcOHZ4Z0doDlsjPomIg==",
+      "dev": true,
+      "requires": {
+        "find-cache-dir": "2.1.0",
+        "lodash": "4.17.15",
+        "make-dir": "2.1.0",
+        "pirates": "4.0.1",
+        "source-map-support": "0.5.16"
+      }
+    },
+    "@babel/template": {
+      "version": "7.7.0",
+      "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.7.0.tgz",
+      "integrity": "sha512-OKcwSYOW1mhWbnTBgQY5lvg1Fxg+VyfQGjcBduZFljfc044J5iDlnDSfhQ867O17XHiSCxYHUxHg2b7ryitbUQ==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "7.5.5",
+        "@babel/parser": "7.7.3",
+        "@babel/types": "7.7.2"
+      }
+    },
+    "@babel/traverse": {
+      "version": "7.7.2",
+      "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.7.2.tgz",
+      "integrity": "sha512-TM01cXib2+rgIZrGJOLaHV/iZUAxf4A0dt5auY6KNZ+cm6aschuJGqKJM3ROTt3raPUdIDk9siAufIFEleRwtw==",
+      "dev": true,
+      "requires": {
+        "@babel/code-frame": "7.5.5",
+        "@babel/generator": "7.7.2",
+        "@babel/helper-function-name": "7.7.0",
+        "@babel/helper-split-export-declaration": "7.7.0",
+        "@babel/parser": "7.7.3",
+        "@babel/types": "7.7.2",
+        "debug": "4.1.1",
+        "globals": "11.12.0",
+        "lodash": "4.17.15"
+      }
+    },
+    "@babel/types": {
+      "version": "7.7.2",
+      "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.7.2.tgz",
+      "integrity": "sha512-YTf6PXoh3+eZgRCBzzP25Bugd2ngmpQVrk7kXX0i5N9BO7TFBtIgZYs7WtxtOGs8e6A4ZI7ECkbBCEHeXocvOA==",
+      "dev": true,
+      "requires": {
+        "esutils": "2.0.3",
+        "lodash": "4.17.15",
+        "to-fast-properties": "2.0.0"
+      }
+    },
+    "@mrmlnc/readdir-enhanced": {
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/@mrmlnc/readdir-enhanced/-/readdir-enhanced-2.2.1.tgz",
+      "integrity": "sha512-bPHp6Ji8b41szTOcaP63VlnbbO5Ny6dwAATtY6JTjh5N2OLrb5Qk/Th5cRkRQhkWCt+EJsYrNB0MiL+Gpn6e3g==",
+      "dev": true,
+      "requires": {
+        "call-me-maybe": "1.0.1",
+        "glob-to-regexp": "0.3.0"
+      }
+    },
+    "@nodelib/fs.stat": {
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz",
+      "integrity": "sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==",
+      "dev": true
+    },
+    "@sindresorhus/is": {
+      "version": "0.7.0",
+      "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz",
+      "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==",
+      "dev": true
+    },
+    "@types/cheerio": {
+      "version": "0.22.14",
+      "resolved": "https://registry.npmjs.org/@types/cheerio/-/cheerio-0.22.14.tgz",
+      "integrity": "sha512-SVtcP2fvPYrebTwpyqxjxb7K5v3ZOAdH409yAEWFPpZThCSGa1K2IFfx6Rg6ttvThCBQXP4fU9WF94sqLoiQGg==",
+      "dev": true,
+      "requires": {
+        "@types/node": "12.12.7"
+      }
+    },
+    "@types/node": {
+      "version": "12.12.7",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.7.tgz",
+      "integrity": "sha512-E6Zn0rffhgd130zbCbAr/JdXfXkoOUFAKNs/rF8qnafSJ8KYaA/j3oz7dcwal+lYjLA7xvdd5J4wdYpCTlP8+w==",
+      "dev": true
+    },
+    "@types/q": {
+      "version": "1.5.2",
+      "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.2.tgz",
+      "integrity": "sha512-ce5d3q03Ex0sy4R14722Rmt6MT07Ua+k4FwDfdcToYJcMKNtRVQvJ6JCAPdAmAnbRb6CsX6aYb9m96NGod9uTw==",
+      "dev": true
+    },
+    "accepts": {
+      "version": "1.3.7",
+      "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz",
+      "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==",
+      "dev": true,
+      "requires": {
+        "mime-types": "2.1.25",
+        "negotiator": "0.6.2"
+      }
+    },
+    "address": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/address/-/address-1.0.3.tgz",
+      "integrity": "sha512-z55ocwKBRLryBs394Sm3ushTtBeg6VAeuku7utSoSnsJKvKcnXFIyC6vh27n3rXyxSgkJBBCAvyOn7gSUcTYjg==",
+      "dev": true
+    },
+    "ajv": {
+      "version": "6.10.2",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz",
+      "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==",
+      "dev": true,
+      "requires": {
+        "fast-deep-equal": "2.0.1",
+        "fast-json-stable-stringify": "2.0.0",
+        "json-schema-traverse": "0.4.1",
+        "uri-js": "4.2.2"
+      }
+    },
+    "alphanum-sort": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/alphanum-sort/-/alphanum-sort-1.0.2.tgz",
+      "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=",
+      "dev": true
+    },
+    "ansi-escapes": {
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz",
+      "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==",
+      "dev": true
+    },
+    "ansi-red": {
+      "version": "0.1.1",
+      "resolved": "https://registry.npmjs.org/ansi-red/-/ansi-red-0.1.1.tgz",
+      "integrity": "sha1-jGOPnRCAgAo1PJwoyKgcpHBdlGw=",
+      "dev": true,
+      "requires": {
+        "ansi-wrap": "0.1.0"
+      }
+    },
+    "ansi-regex": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
+      "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
+      "dev": true
+    },
+    "ansi-styles": {
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+      "dev": true,
+      "requires": {
+        "color-convert": "1.9.3"
+      }
+    },
+    "ansi-wrap": {
+      "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz",
+      "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768=",
+      "dev": true
+    },
+    "arch": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/arch/-/arch-2.1.1.tgz",
+      "integrity": "sha512-BLM56aPo9vLLFVa8+/+pJLnrZ7QGGTVHWsCwieAWT9o9K8UeGaQbzZbGoabWLOo2ksBCztoXdqBZBplqLDDCSg==",
+      "dev": true
+    },
+    "archive-type": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz",
+      "integrity": "sha1-+S5yIzBW38aWlHJ0nCZ72wRrHXA=",
+      "dev": true,
+      "requires": {
+        "file-type": "4.4.0"
+      },
+      "dependencies": {
+        "file-type": {
+          "version": "4.4.0",
+          "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz",
+          "integrity": "sha1-G2AOX8ofvcboDApwxxyNul95BsU=",
+          "dev": true
+        }
+      }
+    },
+    "argparse": {
+      "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
+      "dev": true,
+      "requires": {
+        "sprintf-js": "1.0.3"
+      }
+    },
+    "arr-diff": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz",
+      "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=",
+      "dev": true
+    },
+    "arr-flatten": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz",
+      "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==",
+      "dev": true
+    },
+    "arr-union": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz",
+      "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=",
+      "dev": true
+    },
+    "array-filter": {
+      "version": "0.0.1",
+      "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz",
+      "integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=",
+      "dev": true
+    },
+    "array-find-index": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz",
+      "integrity": "sha1-3wEKoSh+Fku9pvlyOwqWoexBh6E=",
+      "dev": true
+    },
+    "array-flatten": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
+      "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=",
+      "dev": true
+    },
+    "array-map": {
+      "version": "0.0.0",
+      "resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz",
+      "integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=",
+      "dev": true
+    },
+    "array-reduce": {
+      "version": "0.0.0",
+      "resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz",
+      "integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=",
+      "dev": true
+    },
+    "array-union": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz",
+      "integrity": "sha1-mjRBDk9OPaI96jdb5b5w8kd47Dk=",
+      "dev": true,
+      "requires": {
+        "array-uniq": "1.0.3"
+      }
+    },
+    "array-uniq": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/array-uniq/-/array-uniq-1.0.3.tgz",
+      "integrity": "sha1-r2rId6Jcx/dOBYiUdThY39sk/bY=",
+      "dev": true
+    },
+    "array-unique": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz",
+      "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=",
+      "dev": true
+    },
+    "arrify": {
+      "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
+      "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=",
+      "dev": true
+    },
+    "asn1": {
+      "version": "0.2.4",
+      "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz",
+      "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==",
+      "dev": true,
+      "requires": {
+        "safer-buffer": "2.1.2"
+      }
+    },
+    "assert-plus": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
+      "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=",
+      "dev": true
+    },
+    "assign-symbols": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz",
+      "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=",
+      "dev": true
+    },
+    "async": {
+      "version": "2.6.3",
+      "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz",
+      "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==",
+      "dev": true,
+      "requires": {
+        "lodash": "4.17.15"
+      }
+    },
+    "asynckit": {
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+      "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=",
+      "dev": true
+    },
+    "atob": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz",
+      "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==",
+      "dev": true
+    },
+    "autolinker": {
+      "version": "0.28.1",
+      "resolved": "https://registry.npmjs.org/autolinker/-/autolinker-0.28.1.tgz",
+      "integrity": "sha1-BlK0kYgYefB3XazgzcoyM5QqTkc=",
+      "dev": true,
+      "requires": {
+        "gulp-header": "1.8.12"
+      }
+    },
+    "autoprefixer": {
+      "version": "9.7.1",
+      "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.7.1.tgz",
+      "integrity": "sha512-w3b5y1PXWlhYulevrTJ0lizkQ5CyqfeU6BIRDbuhsMupstHQOeb1Ur80tcB1zxSu7AwyY/qCQ7Vvqklh31ZBFw==",
+      "dev": true,
+      "requires": {
+        "browserslist": "4.7.2",
+        "caniuse-lite": "1.0.30001010",
+        "chalk": "2.4.2",
+        "normalize-range": "0.1.2",
+        "num2fraction": "1.2.2",
+        "postcss": "7.0.21",
+        "postcss-value-parser": "4.0.2"
+      }
+    },
+    "aws-sign2": {
+      "version": "0.7.0",
+      "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
+      "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=",
+      "dev": true
+    },
+    "aws4": {
+      "version": "1.8.0",
+      "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz",
+      "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==",
+      "dev": true
+    },
+    "babel-code-frame": {
+      "version": "6.26.0",
+      "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz",
+      "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=",
+      "dev": true,
+      "requires": {
+        "chalk": "1.1.3",
+        "esutils": "2.0.3",
+        "js-tokens": "3.0.2"
+      },
+      "dependencies": {
+        "ansi-styles": {
+          "version": "2.2.1",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz",
+          "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=",
+          "dev": true
+        },
+        "chalk": {
+          "version": "1.1.3",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz",
+          "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=",
+          "dev": true,
+          "requires": {
+            "ansi-styles": "2.2.1",
+            "escape-string-regexp": "1.0.5",
+            "has-ansi": "2.0.0",
+            "strip-ansi": "3.0.1",
+            "supports-color": "2.0.0"
+          }
+        },
+        "js-tokens": {
+          "version": "3.0.2",
+          "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz",
+          "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=",
+          "dev": true
+        },
+        "supports-color": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz",
+          "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=",
+          "dev": true
+        }
+      }
+    },
+    "babel-plugin-dynamic-import-node": {
+      "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz",
+      "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==",
+      "dev": true,
+      "requires": {
+        "object.assign": "4.1.0"
+      }
+    },
+    "babylon": {
+      "version": "6.18.0",
+      "resolved": "https://registry.npmjs.org/babylon/-/babylon-6.18.0.tgz",
+      "integrity": "sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==",
+      "dev": true
+    },
+    "balanced-match": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
+      "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
+      "dev": true
+    },
+    "base": {
+      "version": "0.11.2",
+      "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz",
+      "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==",
+      "dev": true,
+      "requires": {
+        "cache-base": "1.0.1",
+        "class-utils": "0.3.6",
+        "component-emitter": "1.3.0",
+        "define-property": "1.0.0",
+        "isobject": "3.0.1",
+        "mixin-deep": "1.3.2",
+        "pascalcase": "0.1.1"
+      },
+      "dependencies": {
+        "define-property": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz",
+          "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=",
+          "dev": true,
+          "requires": {
+            "is-descriptor": "1.0.2"
+          }
+        },
+        "is-accessor-descriptor": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz",
+          "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==",
+          "dev": true,
+          "requires": {
+            "kind-of": "6.0.2"
+          }
+        },
+        "is-data-descriptor": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz",
+          "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==",
+          "dev": true,
+          "requires": {
+            "kind-of": "6.0.2"
+          }
+        },
+        "is-descriptor": {
+          "version": "1.0.2",
+          "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz",
+          "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==",
+          "dev": true,
+          "requires": {
+            "is-accessor-descriptor": "1.0.0",
+            "is-data-descriptor": "1.0.0",
+            "kind-of": "6.0.2"
+          }
+        }
+      }
+    },
+    "base64-js": {
+      "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz",
+      "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==",
+      "dev": true
+    },
+    "bcrypt-pbkdf": {
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
+      "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=",
+      "dev": true,
+      "requires": {
+        "tweetnacl": "0.14.5"
+      }
+    },
+    "bin-build": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/bin-build/-/bin-build-3.0.0.tgz",
+      "integrity": "sha512-jcUOof71/TNAI2uM5uoUaDq2ePcVBQ3R/qhxAz1rX7UfvduAL/RXD3jXzvn8cVcDJdGVkiR1shal3OH0ImpuhA==",
+      "dev": true,
+      "requires": {
+        "decompress": "4.2.0",
+        "download": "6.2.5",
+        "execa": "0.7.0",
+        "p-map-series": "1.0.0",
+        "tempfile": "2.0.0"
+      }
+    },
+    "bin-check": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz",
+      "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==",
+      "dev": true,
+      "requires": {
+        "execa": "0.7.0",
+        "executable": "4.1.1"
+      }
+    },
+    "bin-version": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-3.1.0.tgz",
+      "integrity": "sha512-Mkfm4iE1VFt4xd4vH+gx+0/71esbfus2LsnCGe8Pi4mndSPyT+NGES/Eg99jx8/lUGWfu3z2yuB/bt5UB+iVbQ==",
+      "dev": true,
+      "requires": {
+        "execa": "1.0.0",
+        "find-versions": "3.1.0"
+      },
+      "dependencies": {
+        "execa": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/execa/-/execa-1.0.0.tgz",
+          "integrity": "sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==",
+          "dev": true,
+          "requires": {
+            "cross-spawn": "6.0.5",
+            "get-stream": "4.1.0",
+            "is-stream": "1.1.0",
+            "npm-run-path": "2.0.2",
+            "p-finally": "1.0.0",
+            "signal-exit": "3.0.2",
+            "strip-eof": "1.0.0"
+          }
+        },
+        "get-stream": {
+          "version": "4.1.0",
+          "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz",
+          "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==",
+          "dev": true,
+          "requires": {
+            "pump": "3.0.0"
+          }
+        }
+      }
+    },
+    "bin-version-check": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-4.0.0.tgz",
+      "integrity": "sha512-sR631OrhC+1f8Cvs8WyVWOA33Y8tgwjETNPyyD/myRBXLkfS/vl74FmH/lFcRl9KY3zwGh7jFhvyk9vV3/3ilQ==",
+      "dev": true,
+      "requires": {
+        "bin-version": "3.1.0",
+        "semver": "5.7.1",
+        "semver-truncate": "1.1.2"
+      }
+    },
+    "bin-wrapper": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/bin-wrapper/-/bin-wrapper-4.1.0.tgz",
+      "integrity": "sha512-hfRmo7hWIXPkbpi0ZltboCMVrU+0ClXR/JgbCKKjlDjQf6igXa7OwdqNcFWQZPZTgiY7ZpzE3+LjjkLiTN2T7Q==",
+      "dev": true,
+      "requires": {
+        "bin-check": "4.1.0",
+        "bin-version-check": "4.0.0",
+        "download": "7.1.0",
+        "import-lazy": "3.1.0",
+        "os-filter-obj": "2.0.0",
+        "pify": "4.0.1"
+      },
+      "dependencies": {
+        "download": {
+          "version": "7.1.0",
+          "resolved": "https://registry.npmjs.org/download/-/download-7.1.0.tgz",
+          "integrity": "sha512-xqnBTVd/E+GxJVrX5/eUJiLYjCGPwMpdL+jGhGU57BvtcA7wwhtHVbXBeUk51kOpW3S7Jn3BQbN9Q1R1Km2qDQ==",
+          "dev": true,
+          "requires": {
+            "archive-type": "4.0.0",
+            "caw": "2.0.1",
+            "content-disposition": "0.5.3",
+            "decompress": "4.2.0",
+            "ext-name": "5.0.0",
+            "file-type": "8.1.0",
+            "filenamify": "2.1.0",
+            "get-stream": "3.0.0",
+            "got": "8.3.2",
+            "make-dir": "1.3.0",
+            "p-event": "2.3.1",
+            "pify": "3.0.0"
+          },
+          "dependencies": {
+            "pify": {
+              "version": "3.0.0",
+              "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+              "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+              "dev": true
+            }
+          }
+        },
+        "file-type": {
+          "version": "8.1.0",
+          "resolved": "https://registry.npmjs.org/file-type/-/file-type-8.1.0.tgz",
+          "integrity": "sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ==",
+          "dev": true
+        },
+        "got": {
+          "version": "8.3.2",
+          "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz",
+          "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==",
+          "dev": true,
+          "requires": {
+            "@sindresorhus/is": "0.7.0",
+            "cacheable-request": "2.1.4",
+            "decompress-response": "3.3.0",
+            "duplexer3": "0.1.4",
+            "get-stream": "3.0.0",
+            "into-stream": "3.1.0",
+            "is-retry-allowed": "1.2.0",
+            "isurl": "1.0.0",
+            "lowercase-keys": "1.0.1",
+            "mimic-response": "1.0.1",
+            "p-cancelable": "0.4.1",
+            "p-timeout": "2.0.1",
+            "pify": "3.0.0",
+            "safe-buffer": "5.1.2",
+            "timed-out": "4.0.1",
+            "url-parse-lax": "3.0.0",
+            "url-to-options": "1.0.1"
+          },
+          "dependencies": {
+            "pify": {
+              "version": "3.0.0",
+              "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+              "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+              "dev": true
+            }
+          }
+        },
+        "make-dir": {
+          "version": "1.3.0",
+          "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz",
+          "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==",
+          "dev": true,
+          "requires": {
+            "pify": "3.0.0"
+          },
+          "dependencies": {
+            "pify": {
+              "version": "3.0.0",
+              "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz",
+              "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=",
+              "dev": true
+            }
+          }
+        },
+        "p-cancelable": {
+          "version": "0.4.1",
+          "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz",
+          "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==",
+          "dev": true
+        },
+        "p-event": {
+          "version": "2.3.1",
+          "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz",
+          "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==",
+          "dev": true,
+          "requires": {
+            "p-timeout": "2.0.1"
+          }
+        },
+        "p-timeout": {
+          "version": "2.0.1",
+          "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz",
+          "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==",
+          "dev": true,
+          "requires": {
+            "p-finally": "1.0.0"
+          }
+        },
+        "prepend-http": {
+          "version": "2.0.0",
+          "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz",
+          "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=",
+          "dev": true
+        },
+        "url-parse-lax": {
+          "version": "3.0.0",
+          "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz",
+          "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=",
+          "dev": true,
+          "requires": {
+            "prepend-http": "2.0.0"
+          }
+        }
+      }
+    },
+    "bl": {
+      "version": "1.2.2",
+      "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.2.tgz",
+      "integrity": "sha512-e8tQYnZodmebYDWGH7KMRvtzKXaJHx3BbilrgZCfvyLUYdKpK1t5PSPmpkny/SgiTSCnjfLW7v5rlONXVFkQEA==",
+      "dev": true,
+      "requires": {
+        "readable-stream": "2.3.6",
+        "safe-buffer": "5.1.2"
+      }
+    },
+    "body": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/body/-/body-5.1.0.tgz",
+      "integrity": "sha1-5LoM5BCkaTYyM2dgnstOZVMSUGk=",
+      "dev": true,
+      "requires": {
+        "continuable-cache": "0.3.1",
+        "error": "7.2.1",
+        "raw-body": "1.1.7",
+        "safe-json-parse": "1.0.1"
+      },
+      "dependencies": {
+        "bytes": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/bytes/-/bytes-1.0.0.tgz",
+          "integrity": "sha1-NWnt6Lo0MV+rmcPpLLBMciDeH6g=",
+          "dev": true
+        },
+        "raw-body": {
+          "version": "1.1.7",
+          "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-1.1.7.tgz",
+          "integrity": "sha1-HQJ8K/oRasxmI7yo8AAWVyqH1CU=",
+          "dev": true,
+          "requires": {
+            "bytes": "1.0.0",
+            "string_decoder": "0.10.31"
+          }
+        },
+        "string_decoder": {
+          "version": "0.10.31",
+          "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
... 27796 lines suppressed ...


Mime
View raw message