0025a8397f
## What changes were proposed in this pull request? - Remove some AccumulableInfo .apply() methods - Remove non-label-specific multiclass precision/recall/fScore in favor of accuracy - Remove toDegrees/toRadians in favor of degrees/radians (SparkR: only deprecated) - Remove approxCountDistinct in favor of approx_count_distinct (SparkR: only deprecated) - Remove unused Python StorageLevel constants - Remove Dataset unionAll in favor of union - Remove unused multiclass option in libsvm parsing - Remove references to deprecated spark configs like spark.yarn.am.port - Remove TaskContext.isRunningLocally - Remove ShuffleMetrics.shuffle* methods - Remove BaseReadWrite.context in favor of session - Remove Column.!== in favor of =!= - Remove Dataset.explode - Remove Dataset.registerTempTable - Remove SQLContext.getOrCreate, setActive, clearActive, constructors Not touched yet - everything else in MLLib - HiveContext - Anything deprecated more recently than 2.0.0, generally ## How was this patch tested? Existing tests Closes #22921 from srowen/SPARK-25908. Lead-authored-by: Sean Owen <sean.owen@databricks.com> Co-authored-by: hyukjinkwon <gurwls223@apache.org> Co-authored-by: Sean Owen <srowen@gmail.com> Signed-off-by: Sean Owen <sean.owen@databricks.com>
59 lines
2.6 KiB
Python
59 lines
2.6 KiB
Python
#
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
|
# contributor license agreements. See the NOTICE file distributed with
|
|
# this work for additional information regarding copyright ownership.
|
|
# The ASF licenses this file to You under the Apache License, Version 2.0
|
|
# (the "License"); you may not use this file except in compliance with
|
|
# the License. You may obtain a copy of the License at
|
|
#
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
#
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
# See the License for the specific language governing permissions and
|
|
# limitations under the License.
|
|
#
|
|
|
|
# Public API of this module: only the StorageLevel class is exported.
__all__ = ["StorageLevel"]
|
|
|
|
|
|
class StorageLevel(object):

    """
    Describes how an RDD should be stored: whether it may use memory,
    whether it spills to disk when it no longer fits in memory, whether it
    is kept in a JVM-specific serialized form, and on how many nodes each
    partition is replicated. Commonly used configurations (MEMORY_ONLY,
    DISK_ONLY, ...) are attached to this class as static constants. Since
    data is always serialized on the Python side, all of those constants
    use the serialized formats.
    """

    def __init__(self, useDisk, useMemory, useOffHeap, deserialized, replication=1):
        # Each flag mirrors a field of the JVM-side StorageLevel.
        self.useDisk = useDisk
        self.useMemory = useMemory
        self.useOffHeap = useOffHeap
        self.deserialized = deserialized
        self.replication = replication

    def __repr__(self):
        # Unambiguous constructor-style form, e.g.
        # StorageLevel(True, False, False, False, 2)
        flags = (self.useDisk, self.useMemory, self.useOffHeap,
                 self.deserialized, self.replication)
        return "StorageLevel(%s, %s, %s, %s, %s)" % flags

    def __str__(self):
        # Human-readable form, e.g. "Disk Memory Serialized 2x Replicated".
        # Each enabled flag contributes one space-terminated word.
        pieces = []
        if self.useDisk:
            pieces.append("Disk ")
        if self.useMemory:
            pieces.append("Memory ")
        if self.useOffHeap:
            pieces.append("OffHeap ")
        pieces.append("Deserialized " if self.deserialized else "Serialized ")
        pieces.append("%sx Replicated" % self.replication)
        return "".join(pieces)
|
|
|
|
# Attach the commonly used storage levels as class attributes. The flag
# order matches the constructor: (useDisk, useMemory, useOffHeap,
# deserialized[, replication]). All constants use serialized formats,
# since PySpark always serializes data on the Python side.
for _name, _flags in [
    ("DISK_ONLY", (True, False, False, False)),
    ("DISK_ONLY_2", (True, False, False, False, 2)),
    ("MEMORY_ONLY", (False, True, False, False)),
    ("MEMORY_ONLY_2", (False, True, False, False, 2)),
    ("MEMORY_AND_DISK", (True, True, False, False)),
    ("MEMORY_AND_DISK_2", (True, True, False, False, 2)),
    ("OFF_HEAP", (True, True, True, False, 1)),
]:
    setattr(StorageLevel, _name, StorageLevel(*_flags))
del _name, _flags
|