# File spark.spec of Package spark

#
# spec file for package spark
#
# Copyright (c) 2019 SUSE LINUX GmbH, Nuernberg, Germany.
# Copyright (c) 2017 cadenzajon
#
# All modifications and additions to the file contributed by third parties
# remain the property of their copyright owners, unless otherwise agreed
# upon. The license for this file, and modifications and additions to the
# file, is the same license as for the pristine package itself (unless the
# license for the pristine package is not an Open Source License, in which
# case the license is the MIT License). An "Open Source License" is a
# license that conforms to the Open Source Definition (Version 1.9)
# published by the Open Source Initiative.

# Please submit bugfixes or comments via http://bugs.opensuse.org/
#


%define sname spark
%define username spark
%define groupname spark
%define sparkdir %{_datadir}/%{sname}
%define workername %{sname}-worker
%define mastername %{sname}-master
# Scala version baked into upstream jar names; keep in sync with
# SPARK_SCALA_VERSION written to spark-env in the install section
%define scalaversion 2.10
Name:           %{sname}
Version:        2.2.3
Release:        0
Summary:        Unified Analytics Engine for Big Data
License:        Apache-2.0
Group:          Development/Libraries/Java
URL:            https://spark.apache.org/
# Apache mirrors only carry the latest releases; older versions such as
# this one are only available from the permanent archive, over https
Source0:        https://archive.apache.org/dist/spark/spark-%{version}/spark-%{version}.tgz
Source1:        build.sh
Source2:        %{workername}.service
Source3:        %{mastername}.service
Source7:        spark-tmpfiles.conf
# PATCH-FIX-OPENSUSE fix-spark-home-and-conf.patch
Patch0:         fix-spark-home-and-conf.patch
BuildRequires:  fdupes
BuildRequires:  java-1_8_0-openjdk
BuildRequires:  java-1_8_0-openjdk-devel
BuildRequires:  spark-kit = %{version}
BuildRequires:  systemd-rpm-macros
BuildRequires:  tar
BuildRequires:  xz
%{?systemd_requires}
Requires:       java-1_8_0-openjdk
Requires(pre):  shadow
BuildArch:      noarch

%description
Apache Spark is a fast and general-purpose cluster computing system. It provides
high-level APIs in Java, Scala, Python and R, and an optimized engine that
supports general execution graphs. It also supports a rich set of higher-level
tools including Spark SQL for SQL and structured data processing, MLlib for
machine learning, GraphX for graph processing, and Spark Streaming.

%prep
# Unpack into a fixed directory name "src" (-c -n) so the kit staged below
# can sit next to it in a predictable layout
%setup -q -c -n src
%patch0 -p1
# Build driver script shipped as Source1
cp -f %{SOURCE1} .
# Stage the tetra kit (installed by the spark-kit BuildRequires) as a
# sibling "kit" directory; presumably consumed by build.sh when the build
# section runs from the parent directory -- TODO confirm against build.sh
cp -Rf %{_datadir}/tetra ../kit

%build
# Run from the parent directory, where ../kit was staged during prep,
# so build.sh sees both src/ and kit/ side by side
cd ..
VERSION=%{version} sh src/build.sh

%install
export NO_BRP_CHECK_BYTECODE_VERSION=true

# directories
install -d -m 755 %{buildroot}%{_bindir}
install -d -m 755 %{buildroot}%{_sbindir}
install -d -m 755 %{buildroot}%{_unitdir}
install -d -m 755 %{buildroot}%{_sysconfdir}/%{name}
install -d -m 750 %{buildroot}%{_localstatedir}/log/%{name}
install -d -m 755 %{buildroot}%{_localstatedir}/lib/%{name}
install -d -m 755 %{buildroot}%{sparkdir}

cp -a %{name}-%{version}/dist/* %{buildroot}%{sparkdir}

# jars, note other versions of spark-streaming-kafka are available from the kit
install -D -m 644 %{name}-%{version}/external/kafka-0-8/target/spark-streaming-kafka-0-8_%{scalaversion}-%{version}.jar %{buildroot}%{sparkdir}/jars/spark-streaming-kafka-0-8_%{scalaversion}-%{version}.jar

# systemd units plus the conventional rc* convenience links
install -D -m 444 %{SOURCE2} %{buildroot}%{_unitdir}/%{workername}.service
install -D -m 444 %{SOURCE3} %{buildroot}%{_unitdir}/%{mastername}.service
ln -s %{_sbindir}/service %{buildroot}%{_sbindir}/rc%{workername}
ln -s %{_sbindir}/service %{buildroot}%{_sbindir}/rc%{mastername}

# tmpfiles configuration (presumably creates the /run/spark dirs referenced
# in spark-env below -- see spark-tmpfiles.conf)
install -D -m 644 %{SOURCE7} %{buildroot}%{_tmpfilesdir}/spark.conf

# bin files: relative symlinks into the shared spark tree
ln -sr %{buildroot}%{sparkdir}/bin/spark-submit %{buildroot}%{_bindir}/spark-submit
ln -sr %{buildroot}%{sparkdir}/bin/spark-class %{buildroot}%{_bindir}/spark-class
ln -sr %{buildroot}%{sparkdir}/bin/spark-shell %{buildroot}%{_bindir}/spark-shell
ln -sr %{buildroot}%{sparkdir}/bin/spark-sql %{buildroot}%{_bindir}/spark-sql
ln -sr %{buildroot}%{sparkdir}/bin/find-spark-home %{buildroot}%{_bindir}/find-spark-home

# set SPARK_HOME and SPARK_CONF_DIR, so that spark CLI tools can find spark
# libraries and configuration files; the upstream detection script is
# replaced wholesale with the fixed SUSE locations
echo '# SUSE specific file locations, generated by .rpm installation' > %{buildroot}%{sparkdir}/bin/find-spark-home
echo 'export SPARK_HOME=%{_datadir}/spark' >> %{buildroot}%{sparkdir}/bin/find-spark-home
echo 'export SPARK_CONF_DIR=%{_sysconfdir}/spark' >> %{buildroot}%{sparkdir}/bin/find-spark-home

# config files
install -D -m 644 %{name}-%{version}/dist/conf/spark-defaults.conf.template %{buildroot}%{_sysconfdir}/spark/spark-defaults.conf
install -D -m 755 %{name}-%{version}/dist/conf/spark-env.sh.template %{buildroot}%{_sysconfdir}/spark/spark-env
# adjust default config (spark-env.sh is read during startup)
cat <<EOF >> %{buildroot}%{_sysconfdir}/spark/spark-env

SPARK_CONF_DIR=/etc/spark/
SPARK_LOG_DIR=/var/log/spark/
SPARK_PID_DIR=/run/spark/
SPARK_CLASSPATH=/usr/share/spark/lib/*
SPARK_WORKER_DIR=/run/spark/work/
SPARK_MASTER_HOST=127.0.0.1
# The spark master addresses used by the workers
SPARK_MASTERS="spark://127.0.0.1:7077"
# The version of Scala used in the build
SPARK_SCALA_VERSION=2.10
EOF

# drop files that must not ship in the package; these paths previously
# lacked the %%{buildroot} prefix and so operated on the build host
# instead of the staged payload
rm -f %{buildroot}%{sparkdir}/python/.gitignore
rm -rf %{buildroot}%{sparkdir}/conf

%fdupes %{buildroot}%{sparkdir}

%pre
%service_add_pre %{workername}.service
%service_add_pre %{mastername}.service
# create system user/group; the home directory uses the same macro path as
# the state directory created in the install section and owned in files
getent group %{groupname} >/dev/null || groupadd -r %{groupname}
getent passwd %{username} >/dev/null || useradd -r -g %{groupname} -d %{_localstatedir}/lib/%{name} -s /sbin/nologin -c "user for Apache Spark" %{username}
# never fail the transaction because the user/group already exists
exit 0

%post
# create the runtime directories from the packaged tmpfiles config now,
# rather than waiting for the next boot
%tmpfiles_create %{_tmpfilesdir}/spark.conf
%service_add_post %{workername}.service
%service_add_post %{mastername}.service

%preun
# SUSE systemd macros: stop/disable handling before package removal
%service_del_preun %{workername}.service
%service_del_preun %{mastername}.service

%postun
# SUSE systemd macros: daemon-reload and restart-on-upgrade handling
%service_del_postun %{workername}.service
%service_del_postun %{mastername}.service

%files
%defattr(-,root,root)
%{sparkdir}
# use the unit-name macros so this list cannot drift from the install section
%{_unitdir}/%{mastername}.service
%{_unitdir}/%{workername}.service
%{_tmpfilesdir}/spark.conf
%{_sbindir}/rc%{workername}
%{_sbindir}/rc%{mastername}
%{_bindir}/spark-submit
%{_bindir}/spark-class
%{_bindir}/spark-shell
%{_bindir}/spark-sql
%{_bindir}/find-spark-home
%dir %attr(-,%{username},%{groupname}) %{_sysconfdir}/%{name}
%dir %attr(0750, %{username}, %{groupname}) %{_localstatedir}/log/%{name}
%dir %attr(0755, %{username}, %{groupname}) %{_localstatedir}/lib/%{name}
%config(noreplace) %attr(-,%{username},%{groupname}) %{_sysconfdir}/%{name}/spark-env
%config(noreplace) %attr(-,%{username},%{groupname}) %{_sysconfdir}/%{name}/spark-defaults.conf

%changelog
# openSUSE Build Service is sponsored by