File mozilla-silence-no-return-type.patch of Package Vulpes

# HG changeset patch
# Parent  9307259332078ad93f044dc64f06462d66d80aef
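Silence "-Wreturn-type" ("control reaches end of non-void function") compiler
warnings in the bundled Skia, SkSL, irregexp and libwebrtc sources.  Switches
that cover every enumerator still trip the warning because the compiler cannot
prove the enum holds no other value, so this patch adds an unreachable marker
(SkUNREACHABLE, UNREACHABLE(), RTC_CHECK_NOTREACHED()) and/or a fallback return
value after such switches; a few RTC_DCHECK_NOTREACHED() calls are upgraded to
RTC_CHECK_NOTREACHED().  The ICU hunks additionally initialize
settings.fPriority in Precision::constructFraction() and
Precision::constructSignificant(), presumably to avoid an uninitialized-field
warning, and DebugTracePriv::readTrace() gains an explicit return value.

A schematic example of the pattern (Usage and ToString are illustrative
placeholder names, not code from the tree; RTC_CHECK_NOTREACHED() is the
WebRTC macro from rtc_base/checks.h):

    enum class Usage { kOveruse, kUnderuse };
    const char* ToString(Usage u) {
      switch (u) {              // handles every enumerator...
        case Usage::kOveruse:  return "kOveruse";
        case Usage::kUnderuse: return "kUnderuse";
      }
      RTC_CHECK_NOTREACHED();   // ...but the compiler cannot prove that
      return "";                // fallback added by this patch; never executed
    }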

diff --git a/gfx/skia/skia/include/codec/SkEncodedOrigin.h b/gfx/skia/skia/include/codec/SkEncodedOrigin.h
--- a/gfx/skia/skia/include/codec/SkEncodedOrigin.h
+++ b/gfx/skia/skia/include/codec/SkEncodedOrigin.h
@@ -36,16 +36,17 @@ static inline SkMatrix SkEncodedOriginTo
         case kBottomRight_SkEncodedOrigin: return SkMatrix::MakeAll(-1,  0, w,  0, -1, h, 0, 0, 1);
         case  kBottomLeft_SkEncodedOrigin: return SkMatrix::MakeAll( 1,  0, 0,  0, -1, h, 0, 0, 1);
         case     kLeftTop_SkEncodedOrigin: return SkMatrix::MakeAll( 0,  1, 0,  1,  0, 0, 0, 0, 1);
         case    kRightTop_SkEncodedOrigin: return SkMatrix::MakeAll( 0, -1, w,  1,  0, 0, 0, 0, 1);
         case kRightBottom_SkEncodedOrigin: return SkMatrix::MakeAll( 0, -1, w, -1,  0, h, 0, 0, 1);
         case  kLeftBottom_SkEncodedOrigin: return SkMatrix::MakeAll( 0,  1, 0, -1,  0, h, 0, 0, 1);
     }
     SK_ABORT("Unexpected origin");
+    SkUNREACHABLE;
 }
 
 /**
  * Return true if the encoded origin includes a 90 degree rotation, in which case the width
  * and height of the source data are swapped relative to a correctly oriented destination.
  */
 static inline bool SkEncodedOriginSwapsWidthHeight(SkEncodedOrigin origin) {
     return origin >= kLeftTop_SkEncodedOrigin;
diff --git a/gfx/skia/skia/include/private/gpu/ganesh/GrTypesPriv.h b/gfx/skia/skia/include/private/gpu/ganesh/GrTypesPriv.h
--- a/gfx/skia/skia/include/private/gpu/ganesh/GrTypesPriv.h
+++ b/gfx/skia/skia/include/private/gpu/ganesh/GrTypesPriv.h
@@ -296,16 +296,17 @@ static inline bool GrTextureTypeHasRestr
             return false;
         case GrTextureType::kRectangle:
             return true;
         case GrTextureType::kExternal:
             return true;
         default:
             SK_ABORT("Unexpected texture type");
     }
+    SkUNREACHABLE;
 }
 
 //////////////////////////////////////////////////////////////////////////////
 
 /**
  * Types used to describe format of vertices in arrays.
  */
 enum GrVertexAttribType {
diff --git a/gfx/skia/skia/src/core/SkDescriptor.cpp b/gfx/skia/skia/src/core/SkDescriptor.cpp
--- a/gfx/skia/skia/src/core/SkDescriptor.cpp
+++ b/gfx/skia/skia/src/core/SkDescriptor.cpp
@@ -21,16 +21,17 @@ std::unique_ptr<SkDescriptor> SkDescript
     SkASSERT(length >= sizeof(SkDescriptor) && SkAlign4(length) == length);
     void* allocation = ::operator new(length);
     return std::unique_ptr<SkDescriptor>(new (allocation) SkDescriptor{});
 }
 
 void SkDescriptor::operator delete(void* p) { ::operator delete(p); }
 void* SkDescriptor::operator new(size_t) {
     SK_ABORT("Descriptors are created with placement new.");
+    SkUNREACHABLE;
 }
 
 void SkDescriptor::flatten(SkWriteBuffer& buffer) const {
     buffer.writePad32(static_cast<const void*>(this), this->fLength);
 }
 
 void* SkDescriptor::addEntry(uint32_t tag, size_t length, const void* data) {
     SkASSERT(tag);
diff --git a/gfx/skia/skia/src/core/SkGeometry.h b/gfx/skia/skia/src/core/SkGeometry.h
--- a/gfx/skia/skia/src/core/SkGeometry.h
+++ b/gfx/skia/skia/src/core/SkGeometry.h
@@ -277,28 +277,30 @@ static inline bool SkCubicIsDegenerate(S
         case SkCubicType::kLocalCusp:
         case SkCubicType::kCuspAtInfinity:
             return false;
         case SkCubicType::kQuadratic:
         case SkCubicType::kLineOrPoint:
             return true;
     }
     SK_ABORT("Invalid SkCubicType");
+    SkUNREACHABLE;
 }
 
 static inline const char* SkCubicTypeName(SkCubicType type) {
     switch (type) {
         case SkCubicType::kSerpentine: return "kSerpentine";
         case SkCubicType::kLoop: return "kLoop";
         case SkCubicType::kLocalCusp: return "kLocalCusp";
         case SkCubicType::kCuspAtInfinity: return "kCuspAtInfinity";
         case SkCubicType::kQuadratic: return "kQuadratic";
         case SkCubicType::kLineOrPoint: return "kLineOrPoint";
     }
     SK_ABORT("Invalid SkCubicType");
+    SkUNREACHABLE;
 }
 
 /** Returns the cubic classification.
 
     t[],s[] are set to the two homogeneous parameter values at which points the lines L & M
     intersect with K, sorted from smallest to largest and oriented so positive values of the
     implicit are on the "left" side. For a serpentine curve they are the inflection points. For a
     loop they are the double point. For a local cusp, they are both equal and denote the cusp point.
diff --git a/gfx/skia/skia/src/core/SkTextBlob.cpp b/gfx/skia/skia/src/core/SkTextBlob.cpp
--- a/gfx/skia/skia/src/core/SkTextBlob.cpp
+++ b/gfx/skia/skia/src/core/SkTextBlob.cpp
@@ -208,16 +208,17 @@ unsigned SkTextBlob::ScalarsPerGlyph(Gly
 }
 
 void SkTextBlob::operator delete(void* p) {
     sk_free(p);
 }
 
 void* SkTextBlob::operator new(size_t) {
     SK_ABORT("All blobs are created by placement new.");
+    SkUNREACHABLE;
 }
 
 void* SkTextBlob::operator new(size_t, void* p) {
     return p;
 }
 
 SkTextBlobRunIterator::SkTextBlobRunIterator(const SkTextBlob* blob)
     : fCurrentRun(SkTextBlob::RunRecord::First(blob)) {
diff --git a/gfx/skia/skia/src/core/SkTypeface_remote.h b/gfx/skia/skia/src/core/SkTypeface_remote.h
--- a/gfx/skia/skia/src/core/SkTypeface_remote.h
+++ b/gfx/skia/skia/src/core/SkTypeface_remote.h
@@ -103,83 +103,97 @@ public:
 
     SkTypefaceID remoteTypefaceID() const {return fTypefaceID;}
 
     int glyphCount() const {return fGlyphCount;}
 
     bool isLogging() const {return fIsLogging;}
 
 protected:
-    int onGetUPEM() const override { SK_ABORT("Should never be called."); }
+    int onGetUPEM() const override { SK_ABORT("Should never be called."); SkUNREACHABLE; }
     std::unique_ptr<SkStreamAsset> onOpenStream(int* ttcIndex) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     sk_sp<SkTypeface> onMakeClone(const SkFontArguments& args) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     bool onGlyphMaskNeedsCurrentColor() const override {
         return fGlyphMaskNeedsCurrentColor;
     }
     int onGetVariationDesignPosition(SkFontArguments::VariationPosition::Coordinate coordinates[],
                                      int coordinateCount) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     int onGetVariationDesignParameters(SkFontParameters::Variation::Axis parameters[],
                                        int parameterCount) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     void onGetFamilyName(SkString* familyName) const override {
         // Used by SkStrikeCache::DumpMemoryStatistics.
         *familyName = "";
     }
     bool onGetPostScriptName(SkString*) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     SkTypeface::LocalizedStrings* onCreateFamilyNameIterator() const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     int onGetTableTags(SkFontTableTag tags[]) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     size_t onGetTableData(SkFontTableTag, size_t offset, size_t length, void* data) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     std::unique_ptr<SkScalerContext> onCreateScalerContext(
         const SkScalerContextEffects& effects, const SkDescriptor* desc) const override
     {
         return std::make_unique<SkScalerContextProxy>(
                 sk_ref_sp(const_cast<SkTypefaceProxy*>(this)), effects, desc, fDiscardableManager);
     }
     void onFilterRec(SkScalerContextRec* rec) const override {
         // The rec filtering is already applied by the server when generating
         // the glyphs.
     }
     void onGetFontDescriptor(SkFontDescriptor*, bool*) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     void getGlyphToUnicodeMap(SkUnichar*) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
 
     void getPostScriptGlyphNames(SkString*) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
 
     std::unique_ptr<SkAdvancedTypefaceMetrics> onGetAdvancedMetrics() const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     void onCharsToGlyphs(const SkUnichar* chars, int count, SkGlyphID glyphs[]) const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
     int onCountGlyphs() const override {
         return this->glyphCount();
     }
 
     void* onGetCTFontRef() const override {
         SK_ABORT("Should never be called.");
+        SkUNREACHABLE;
     }
 
 private:
     const SkTypefaceID                              fTypefaceID;
     const int                                       fGlyphCount;
     const bool                                      fIsLogging;
     const bool                                      fGlyphMaskNeedsCurrentColor;
     sk_sp<SkStrikeClient::DiscardableHandleManager> fDiscardableManager;
diff --git a/gfx/skia/skia/src/sksl/SkSLOperator.cpp b/gfx/skia/skia/src/sksl/SkSLOperator.cpp
--- a/gfx/skia/skia/src/sksl/SkSLOperator.cpp
+++ b/gfx/skia/skia/src/sksl/SkSLOperator.cpp
@@ -46,17 +46,17 @@ OperatorPrecedence Operator::getBinaryPr
         case Kind::SLASHEQ:      // fall through
         case Kind::PERCENTEQ:    // fall through
         case Kind::SHLEQ:        // fall through
         case Kind::SHREQ:        // fall through
         case Kind::BITWISEANDEQ: // fall through
         case Kind::BITWISEXOREQ: // fall through
         case Kind::BITWISEOREQ:  return OperatorPrecedence::kAssignment;
         case Kind::COMMA:        return OperatorPrecedence::kSequence;
-        default: SK_ABORT("unsupported binary operator");
+        default: SkUNREACHABLE;
     }
 }
 
 const char* Operator::operatorName() const {
     switch (this->kind()) {
         case Kind::PLUS:         return " + ";
         case Kind::MINUS:        return " - ";
         case Kind::STAR:         return " * ";
diff --git a/gfx/skia/skia/src/sksl/ir/SkSLType.h b/gfx/skia/skia/src/sksl/ir/SkSLType.h
--- a/gfx/skia/skia/src/sksl/ir/SkSLType.h
+++ b/gfx/skia/skia/src/sksl/ir/SkSLType.h
@@ -463,16 +463,17 @@ public:
      * always match `componentType()`.
      */
     virtual const Type& slotType(size_t) const {
         return *this;
     }
 
     virtual SkSpan<const Field> fields() const {
         SK_ABORT("Internal error: not a struct");
+        SkUNREACHABLE;
     }
 
     /**
      * For generic types, returns the types that this generic type can substitute for.
      */
     virtual SkSpan<const Type* const> coercibleTypes() const {
         SkDEBUGFAIL("Internal error: not a generic type");
         return {};
diff --git a/gfx/skia/skia/src/sksl/tracing/SkSLDebugTracePriv.cpp b/gfx/skia/skia/src/sksl/tracing/SkSLDebugTracePriv.cpp
--- a/gfx/skia/skia/src/sksl/tracing/SkSLDebugTracePriv.cpp
+++ b/gfx/skia/skia/src/sksl/tracing/SkSLDebugTracePriv.cpp
@@ -196,11 +196,12 @@ void DebugTracePriv::dump(SkWStream* o) 
         }
     }
 }
 
 void DebugTracePriv::writeTrace(SkWStream* w) const {
 }
 
 bool DebugTracePriv::readTrace(SkStream* r) {
+    return false;
 }
 
 }  // namespace SkSL
diff --git a/gfx/skia/skia/src/utils/SkShadowUtils.cpp b/gfx/skia/skia/src/utils/SkShadowUtils.cpp
--- a/gfx/skia/skia/src/utils/SkShadowUtils.cpp
+++ b/gfx/skia/skia/src/utils/SkShadowUtils.cpp
@@ -136,16 +136,17 @@ struct SpotVerticesFactory {
                 }
                 return false;
             case OccluderType::kDirectional:
             case OccluderType::kDirectionalTransparent:
                 *translate = that.fOffset - fOffset;
                 return true;
         }
         SK_ABORT("Uninitialized occluder type?");
+        SkUNREACHABLE;
     }
 
     sk_sp<SkVertices> makeVertices(const SkPath& path, const SkMatrix& ctm,
                                    SkVector* translate) const {
         bool transparent = fOccluderType == OccluderType::kPointTransparent ||
                            fOccluderType == OccluderType::kDirectionalTransparent;
         bool directional = fOccluderType == OccluderType::kDirectional ||
                            fOccluderType == OccluderType::kDirectionalTransparent;
diff --git a/intl/icu/source/i18n/number_rounding.cpp b/intl/icu/source/i18n/number_rounding.cpp
--- a/intl/icu/source/i18n/number_rounding.cpp
+++ b/intl/icu/source/i18n/number_rounding.cpp
@@ -282,27 +282,29 @@ Precision IncrementPrecision::withMinFra
 }
 
 FractionPrecision Precision::constructFraction(int32_t minFrac, int32_t maxFrac) {
     FractionSignificantSettings settings;
     settings.fMinFrac = static_cast<digits_t>(minFrac);
     settings.fMaxFrac = static_cast<digits_t>(maxFrac);
     settings.fMinSig = -1;
     settings.fMaxSig = -1;
+    settings.fPriority = UNUM_ROUNDING_PRIORITY_RELAXED;
     PrecisionUnion union_;
     union_.fracSig = settings;
     return {RND_FRACTION, union_};
 }
 
 Precision Precision::constructSignificant(int32_t minSig, int32_t maxSig) {
     FractionSignificantSettings settings;
     settings.fMinFrac = -1;
     settings.fMaxFrac = -1;
     settings.fMinSig = static_cast<digits_t>(minSig);
     settings.fMaxSig = static_cast<digits_t>(maxSig);
+    settings.fPriority = UNUM_ROUNDING_PRIORITY_RELAXED;
     PrecisionUnion union_;
     union_.fracSig = settings;
     return {RND_SIGNIFICANT, union_};
 }
 
 Precision
 Precision::constructFractionSignificant(
         const FractionPrecision &base,
diff --git a/js/src/irregexp/imported/regexp-parser.cc b/js/src/irregexp/imported/regexp-parser.cc
--- a/js/src/irregexp/imported/regexp-parser.cc
+++ b/js/src/irregexp/imported/regexp-parser.cc
@@ -2780,16 +2780,17 @@ bool MayContainStrings(ClassSetOperandTy
       return false;
     case ClassSetOperandType::kCharacterClassEscape:
     case ClassSetOperandType::kClassStringDisjunction:
       return operand->AsClassSetOperand()->has_strings();
     case ClassSetOperandType::kNestedClass:
       if (operand->IsClassRanges()) return false;
       return operand->AsClassSetExpression()->may_contain_strings();
   }
+  UNREACHABLE();
 }
 
 }  // namespace
 
 template <class CharT>
 void RegExpParserImpl<CharT>::AddMaybeSimpleCaseFoldedRange(
     ZoneList<CharacterRange>* ranges, CharacterRange new_range) {
   DCHECK(unicode_sets());
diff --git a/third_party/libwebrtc/api/adaptation/resource.cc b/third_party/libwebrtc/api/adaptation/resource.cc
--- a/third_party/libwebrtc/api/adaptation/resource.cc
+++ b/third_party/libwebrtc/api/adaptation/resource.cc
@@ -17,16 +17,17 @@ namespace webrtc {
 const char* ResourceUsageStateToString(ResourceUsageState usage_state) {
   switch (usage_state) {
     case ResourceUsageState::kOveruse:
       return "kOveruse";
     case ResourceUsageState::kUnderuse:
       return "kUnderuse";
   }
   RTC_CHECK_NOTREACHED();
+  return nullptr;
 }
 
 ResourceListener::~ResourceListener() {}
 
 Resource::Resource() {}
 
 Resource::~Resource() {}
 
diff --git a/third_party/libwebrtc/api/rtp_parameters.cc b/third_party/libwebrtc/api/rtp_parameters.cc
--- a/third_party/libwebrtc/api/rtp_parameters.cc
+++ b/third_party/libwebrtc/api/rtp_parameters.cc
@@ -32,16 +32,17 @@ const char* DegradationPreferenceToStrin
     case DegradationPreference::MAINTAIN_FRAMERATE:
       return "maintain-framerate";
     case DegradationPreference::MAINTAIN_RESOLUTION:
       return "maintain-resolution";
     case DegradationPreference::BALANCED:
       return "balanced";
   }
   RTC_CHECK_NOTREACHED();
+  return "";
 }
 
 const double kDefaultBitratePriority = 1.0;
 
 RtcpFeedback::RtcpFeedback() = default;
 RtcpFeedback::RtcpFeedback(RtcpFeedbackType type) : type(type) {}
 RtcpFeedback::RtcpFeedback(RtcpFeedbackType type,
                            RtcpFeedbackMessageType message_type)
diff --git a/third_party/libwebrtc/api/video/video_frame_buffer.cc b/third_party/libwebrtc/api/video/video_frame_buffer.cc
--- a/third_party/libwebrtc/api/video/video_frame_buffer.cc
+++ b/third_party/libwebrtc/api/video/video_frame_buffer.cc
@@ -109,16 +109,18 @@ const char* VideoFrameBufferTypeToString
       return "kI210";
     case VideoFrameBuffer::Type::kI410:
       return "kI410";
     case VideoFrameBuffer::Type::kNV12:
       return "kNV12";
     default:
       RTC_DCHECK_NOTREACHED();
   }
+  RTC_DCHECK_NOTREACHED();
+  return nullptr;
 }
 
 int I420BufferInterface::ChromaWidth() const {
   return (width() + 1) / 2;
 }
 
 int I420BufferInterface::ChromaHeight() const {
   return (height() + 1) / 2;
diff --git a/third_party/libwebrtc/api/video_codecs/video_codec.cc b/third_party/libwebrtc/api/video_codecs/video_codec.cc
--- a/third_party/libwebrtc/api/video_codecs/video_codec.cc
+++ b/third_party/libwebrtc/api/video_codecs/video_codec.cc
@@ -161,16 +161,17 @@ const char* CodecTypeToPayloadString(Vid
     case kVideoCodecH264:
       return kPayloadNameH264;
     case kVideoCodecGeneric:
       return kPayloadNameGeneric;
     case kVideoCodecH265:
       return kPayloadNameH265;
   }
   RTC_CHECK_NOTREACHED();
+  return "";
 }
 
 VideoCodecType PayloadStringToCodecType(const std::string& name) {
   if (absl::EqualsIgnoreCase(name, kPayloadNameVp8))
     return kVideoCodecVP8;
   if (absl::EqualsIgnoreCase(name, kPayloadNameVp9))
     return kVideoCodecVP9;
   if (absl::EqualsIgnoreCase(name, kPayloadNameAv1) ||
diff --git a/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc
--- a/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc
+++ b/third_party/libwebrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc
@@ -189,16 +189,17 @@ class VideoEncoderSoftwareFallbackWrappe
         [[fallthrough]];
       case EncoderState::kMainEncoderUsed:
         return encoder_.get();
       case EncoderState::kFallbackDueToFailure:
       case EncoderState::kForcedFallback:
         return fallback_encoder_.get();
     }
     RTC_CHECK_NOTREACHED();
+    return nullptr;
   }
 
   // Updates encoder with last observed parameters, such as callbacks, rates,
   // etc.
   void PrimeEncoder(VideoEncoder* encoder) const;
 
   // Settings used in the last InitEncode call and used if a dynamic fallback to
   // software is required.
@@ -387,16 +388,17 @@ int32_t VideoEncoderSoftwareFallbackWrap
     case EncoderState::kMainEncoderUsed: {
       return EncodeWithMainEncoder(frame, frame_types);
     }
     case EncoderState::kFallbackDueToFailure:
     case EncoderState::kForcedFallback:
       return fallback_encoder_->Encode(frame, frame_types);
   }
   RTC_CHECK_NOTREACHED();
+  return WEBRTC_VIDEO_CODEC_ERROR;
 }
 
 int32_t VideoEncoderSoftwareFallbackWrapper::EncodeWithMainEncoder(
     const VideoFrame& frame,
     const std::vector<VideoFrameType>* frame_types) {
   int32_t ret = encoder_->Encode(frame, frame_types);
   // If requested, try a software fallback.
   bool fallback_requested = (ret == WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE);
diff --git a/third_party/libwebrtc/call/adaptation/video_stream_adapter.cc b/third_party/libwebrtc/call/adaptation/video_stream_adapter.cc
--- a/third_party/libwebrtc/call/adaptation/video_stream_adapter.cc
+++ b/third_party/libwebrtc/call/adaptation/video_stream_adapter.cc
@@ -164,16 +164,17 @@ const char* Adaptation::StatusToString(A
     case Status::kInsufficientInput:
       return "kInsufficientInput";
     case Status::kAdaptationDisabled:
       return "kAdaptationDisabled";
     case Status::kRejectedByConstraint:
       return "kRejectedByConstraint";
   }
   RTC_CHECK_NOTREACHED();
+  return "";
 }
 
 Adaptation::Adaptation(int validation_id,
                        VideoSourceRestrictions restrictions,
                        VideoAdaptationCounters counters,
                        VideoStreamInputState input_state)
     : validation_id_(validation_id),
       status_(Status::kValid),
@@ -386,16 +387,17 @@ VideoStreamAdapter::RestrictionsOrState 
     case DegradationPreference::MAINTAIN_RESOLUTION: {
       // Scale up framerate.
       return IncreaseFramerate(input_state, current_restrictions_);
     }
     case DegradationPreference::DISABLED:
       return Adaptation::Status::kAdaptationDisabled;
   }
   RTC_CHECK_NOTREACHED();
+  return Adaptation::Status::kAdaptationDisabled;
 }
 
 Adaptation VideoStreamAdapter::GetAdaptationDown() {
   RTC_DCHECK_RUN_ON(&sequence_checker_);
   VideoStreamInputState input_state = input_state_provider_->InputState();
   ++adaptation_validation_id_;
   RestrictionsOrState restrictions_or_state =
       GetAdaptationDownStep(input_state, current_restrictions_);
@@ -468,16 +470,17 @@ VideoStreamAdapter::GetAdaptationDownSte
     }
     case DegradationPreference::MAINTAIN_RESOLUTION: {
       return DecreaseFramerate(input_state, current_restrictions);
     }
     case DegradationPreference::DISABLED:
       return Adaptation::Status::kAdaptationDisabled;
   }
   RTC_CHECK_NOTREACHED();
+  return Adaptation::Status::kAdaptationDisabled;
 }
 
 VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution(
     const VideoStreamInputState& input_state,
     const RestrictionsWithCounters& current_restrictions) {
   int target_pixels =
       GetLowerResolutionThan(input_state.frame_size_pixels().value());
   // Use single active stream if set, this stream could be lower than the input.
@@ -621,16 +624,18 @@ Adaptation VideoStreamAdapter::GetAdaptD
     case DegradationPreference::MAINTAIN_FRAMERATE:
       return GetAdaptationDown();
     case DegradationPreference::BALANCED: {
       return RestrictionsOrStateToAdaptation(
           GetAdaptDownResolutionStepForBalanced(input_state), input_state);
     }
   }
   RTC_CHECK_NOTREACHED();
+  return RestrictionsOrStateToAdaptation(
+         Adaptation::Status::kAdaptationDisabled, input_state);
 }
 
 VideoStreamAdapter::RestrictionsOrState
 VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced(
     const VideoStreamInputState& input_state) const {
   // Adapt twice if the first adaptation did not decrease resolution.
   auto first_step = GetAdaptationDownStep(input_state, current_restrictions_);
   if (!absl::holds_alternative<RestrictionsWithCounters>(first_step)) {
diff --git a/third_party/libwebrtc/call/rtp_payload_params.cc b/third_party/libwebrtc/call/rtp_payload_params.cc
--- a/third_party/libwebrtc/call/rtp_payload_params.cc
+++ b/third_party/libwebrtc/call/rtp_payload_params.cc
@@ -422,17 +422,18 @@ std::optional<FrameDependencyStructure> 
       }
       return structure;
     }
     case VideoCodecType::kVideoCodecAV1:
     case VideoCodecType::kVideoCodecH264:
     case VideoCodecType::kVideoCodecH265:
       return std::nullopt;
   }
-  RTC_DCHECK_NOTREACHED() << "Unsupported codec.";
+  //RTC_DCHECK_NOTREACHED() << "Unsupported codec.";
+  RTC_CHECK_NOTREACHED();
 }
 
 void RtpPayloadParams::GenericToGeneric(int64_t frame_id,
                                         bool is_keyframe,
                                         RTPVideoHeader* rtp_video_header) {
   RTPVideoHeader::GenericDescriptorInfo& generic =
       rtp_video_header->generic.emplace();
 
diff --git a/third_party/libwebrtc/call/video_send_stream.cc b/third_party/libwebrtc/call/video_send_stream.cc
--- a/third_party/libwebrtc/call/video_send_stream.cc
+++ b/third_party/libwebrtc/call/video_send_stream.cc
@@ -29,16 +29,17 @@ const char* StreamTypeToString(VideoSend
     case VideoSendStream::StreamStats::StreamType::kMedia:
       return "media";
     case VideoSendStream::StreamStats::StreamType::kRtx:
       return "rtx";
     case VideoSendStream::StreamStats::StreamType::kFlexfec:
       return "flexfec";
   }
   RTC_CHECK_NOTREACHED();
+  return "";
 }
 
 }  // namespace
 
 VideoSendStream::StreamStats::StreamStats() = default;
 VideoSendStream::StreamStats::~StreamStats() = default;
 
 std::string VideoSendStream::StreamStats::ToString() const {
diff --git a/third_party/libwebrtc/media/base/codec_comparators.cc b/third_party/libwebrtc/media/base/codec_comparators.cc
--- a/third_party/libwebrtc/media/base/codec_comparators.cc
+++ b/third_party/libwebrtc/media/base/codec_comparators.cc
@@ -189,16 +189,17 @@ bool MatchesWithCodecRules(const Codec& 
                 left_codec.bitrate == right_codec.bitrate) &&
                ((right_codec.channels < 2 && left_codec.channels < 2) ||
                 left_codec.channels == right_codec.channels);
 
       case Codec::Type::kVideo:
         return IsSameCodecSpecific(left_codec.name, left_codec.params,
                                    right_codec.name, right_codec.params);
     }
+    return false;
   };
 
   return matches_id && matches_type_specific();
 }
 
 // Finds a codec in `codecs2` that matches `codec_to_match`, which is
 // a member of `codecs1`. If `codec_to_match` is an RED or RTX codec, both
 // the codecs themselves and their associated codecs must match.
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc
--- a/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/clipping_predictor.cc
@@ -373,12 +373,12 @@ std::unique_ptr<ClippingPredictor> Creat
           config.reference_window_delay, config.clipping_threshold,
           /*adaptive_step_estimation=*/true);
     case ClippingPredictorMode::kFixedStepClippingPeakPrediction:
       return std::make_unique<ClippingPeakPredictor>(
           num_channels, config.window_length, config.reference_window_length,
           config.reference_window_delay, config.clipping_threshold,
           /*adaptive_step_estimation=*/false);
   }
-  RTC_DCHECK_NOTREACHED();
+  RTC_CHECK_NOTREACHED();
 }
 
 }  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc
--- a/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/input_volume_stats_reporter.cc
@@ -43,16 +43,17 @@ int ComputeAverageUpdate(int sum_updates
 constexpr absl::string_view MetricNamePrefix(
     InputVolumeType input_volume_type) {
   switch (input_volume_type) {
     case InputVolumeType::kApplied:
       return "WebRTC.Audio.Apm.AppliedInputVolume.";
     case InputVolumeType::kRecommended:
       return "WebRTC.Audio.Apm.RecommendedInputVolume.";
   }
+  RTC_CHECK_NOTREACHED();
 }
 
 metrics::Histogram* CreateVolumeHistogram(InputVolumeType input_volume_type) {
   char buffer[64];
   rtc::SimpleStringBuilder builder(buffer);
   builder << MetricNamePrefix(input_volume_type) << "OnChange";
   return metrics::HistogramFactoryGetCountsLinear(/*name=*/builder.str(),
                                                   /*min=*/1,
diff --git a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc
--- a/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc
+++ b/third_party/libwebrtc/modules/audio_processing/agc2/rnn_vad/rnn_fc.cc
@@ -55,16 +55,18 @@ std::vector<float> PreprocessWeights(rtc
 rtc::FunctionView<float(float)> GetActivationFunction(
     ActivationFunction activation_function) {
   switch (activation_function) {
     case ActivationFunction::kTansigApproximated:
       return ::rnnoise::TansigApproximated;
     case ActivationFunction::kSigmoidApproximated:
       return ::rnnoise::SigmoidApproximated;
   }
+  // Not reached: all ActivationFunction values are handled above.
+  return ::rnnoise::TansigApproximated;
 }
 
 }  // namespace
 
 FullyConnectedLayer::FullyConnectedLayer(
     const int input_size,
     const int output_size,
     const rtc::ArrayView<const int8_t> bias,
diff --git a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc
--- a/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc
+++ b/third_party/libwebrtc/modules/audio_processing/audio_processing_impl.cc
@@ -95,16 +95,17 @@ GainControl::Mode Agc1ConfigModeToInterf
     case Agc1Config::kAdaptiveAnalog:
       return GainControl::kAdaptiveAnalog;
     case Agc1Config::kAdaptiveDigital:
       return GainControl::kAdaptiveDigital;
     case Agc1Config::kFixedDigital:
       return GainControl::kFixedDigital;
   }
   RTC_CHECK_NOTREACHED();
+  return GainControl::kAdaptiveAnalog;
 }
 
 bool MinimizeProcessingForUnusedOutput() {
   return !field_trial::IsEnabled("WebRTC-MutedStateKillSwitch");
 }
 
 // Maximum lengths that frame of samples being passed from the render side to
 // the capture side can have (does not apply to AEC3).
@@ -162,17 +163,17 @@ int AudioFormatValidityToErrorCode(Audio
     case AudioFormatValidity::kValidAndSupported:
       return AudioProcessing::kNoError;
     case AudioFormatValidity::kValidButUnsupportedSampleRate:  // fall-through
     case AudioFormatValidity::kInvalidSampleRate:
       return AudioProcessing::kBadSampleRateError;
     case AudioFormatValidity::kInvalidChannelCount:
       return AudioProcessing::kBadNumberChannelsError;
   }
-  RTC_DCHECK(false);
+  RTC_CHECK_NOTREACHED();
 }
 
 // Returns an AudioProcessing::Error together with the best possible option for
 // output audio content.
 std::pair<int, FormatErrorOutputOption> ChooseErrorOutputOption(
     const StreamConfig& input_config,
     const StreamConfig& output_config) {
   AudioFormatValidity input_validity = ValidateAudioFormat(input_config);
@@ -2058,16 +2059,17 @@ void AudioProcessingImpl::InitializeNois
             case NoiseSuppresionConfig::kModerate:
               return NsConfig::SuppressionLevel::k12dB;
             case NoiseSuppresionConfig::kHigh:
               return NsConfig::SuppressionLevel::k18dB;
             case NoiseSuppresionConfig::kVeryHigh:
               return NsConfig::SuppressionLevel::k21dB;
           }
           RTC_CHECK_NOTREACHED();
+          return NsConfig::SuppressionLevel::k6dB;
         };
 
     NsConfig cfg;
     cfg.target_level = map_level(config_.noise_suppression.level);
     submodules_.noise_suppressor = std::make_unique<NoiseSuppressor>(
         cfg, proc_sample_rate_hz(), num_proc_channels());
   }
 }
diff --git a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
--- a/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
+++ b/third_party/libwebrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc
@@ -81,16 +81,18 @@ BandwidthLimitedCause GetBandwidthLimite
       // Probes may not be sent in this state.
       return BandwidthLimitedCause::kLossLimitedBwe;
     case LossBasedState::kIncreasing:
       // Probes may be sent in this state.
       return BandwidthLimitedCause::kLossLimitedBweIncreasing;
     case LossBasedState::kDelayBasedEstimate:
       return BandwidthLimitedCause::kDelayBasedLimited;
   }
+  // Not reached: all LossBasedState values are handled above.
+  return BandwidthLimitedCause::kLossLimitedBwe;
 }
 
 }  // namespace
 
 GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config,
                                                  GoogCcConfig goog_cc_config)
     : env_(config.env),
       packet_feedback_only_(goog_cc_config.feedback_only),
diff --git a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc
--- a/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc
+++ b/third_party/libwebrtc/modules/desktop_capture/linux/wayland/screencast_portal.cc
@@ -39,16 +39,17 @@ ScreenCastPortal::CaptureSourceType Scre
   switch (type) {
     case CaptureType::kScreen:
       return ScreenCastPortal::CaptureSourceType::kScreen;
     case CaptureType::kWindow:
       return ScreenCastPortal::CaptureSourceType::kWindow;
     case CaptureType::kAnyScreenContent:
       return ScreenCastPortal::CaptureSourceType::kAnyScreenContent;
   }
+  RTC_CHECK_NOTREACHED();
 }
 
 ScreenCastPortal::ScreenCastPortal(CaptureType type, PortalNotifier* notifier)
     : ScreenCastPortal(type,
                        notifier,
                        OnProxyRequested,
                        OnSourcesRequestResponseSignal,
                        this) {}
diff --git a/third_party/libwebrtc/modules/pacing/bitrate_prober.cc b/third_party/libwebrtc/modules/pacing/bitrate_prober.cc
--- a/third_party/libwebrtc/modules/pacing/bitrate_prober.cc
+++ b/third_party/libwebrtc/modules/pacing/bitrate_prober.cc
@@ -86,16 +86,17 @@ bool BitrateProber::ReadyToSetActiveStat
         return true;
       }
       // If config_.min_packet_size > 0, a "large enough" packet must be
       // sent first, before a probe can be generated and sent. Otherwise,
       // send the probe asap.
       return packet_size >=
              std::min(RecommendedMinProbeSize(), config_.min_packet_size.Get());
   }
+  RTC_CHECK_NOTREACHED();
 }
 
 void BitrateProber::OnIncomingPacket(DataSize packet_size) {
   MaybeSetActiveState(packet_size);
 }
 
 void BitrateProber::CreateProbeCluster(
     const ProbeClusterConfig& cluster_config) {
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc
@@ -41,11 +41,12 @@ std::unique_ptr<VideoRtpDepacketizer> Cr
       return std::make_unique<VideoRtpDepacketizerH265>();
 #else
       return nullptr;
 #endif
     case kVideoCodecGeneric:
       return std::make_unique<VideoRtpDepacketizerGeneric>();
   }
   RTC_CHECK_NOTREACHED();
+  return nullptr;
 }
 
 }  // namespace webrtc
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtcp_packet/congestion_control_feedback.cc
@@ -105,16 +105,17 @@ uint16_t To2BitEcn(rtc::EcnMarking ecn_m
       return 0;
     case rtc::EcnMarking::kEct1:
       return kEcnEct1 << 13;
     case rtc::EcnMarking::kEct0:
       return kEcnEct0 << 13;
     case rtc::EcnMarking::kCe:
       return kEcnCe << 13;
   }
+  return 0; // should not be reached
 }
 
 rtc::EcnMarking ToEcnMarking(uint16_t receive_info) {
   const uint16_t ecn = (receive_info >> 13) & 0b11;
   if (ecn == kEcnEct1) {
     return rtc::EcnMarking::kEct1;
   }
   if (ecn == kEcnEct0) {
diff --git a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc
--- a/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc
+++ b/third_party/libwebrtc/modules/rtp_rtcp/source/rtp_sender.cc
@@ -145,16 +145,17 @@ bool IsNonVolatile(RTPExtensionType type
 #if defined(WEBRTC_MOZILLA_BUILD)
     case kRtpExtensionCsrcAudioLevel:
       // TODO: Mozilla implement for CsrcAudioLevel
       RTC_CHECK(false);
       return false;
 #endif
   }
   RTC_CHECK_NOTREACHED();
+  return false;
 }
 
 bool HasBweExtension(const RtpHeaderExtensionMap& extensions_map) {
   return extensions_map.IsRegistered(kRtpExtensionTransportSequenceNumber) ||
          extensions_map.IsRegistered(kRtpExtensionTransportSequenceNumber02) ||
          extensions_map.IsRegistered(kRtpExtensionAbsoluteSendTime) ||
          extensions_map.IsRegistered(kRtpExtensionTransmissionTimeOffset);
 }
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
--- a/third_party/libwebrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
+++ b/third_party/libwebrtc/modules/video_coding/codecs/vp8/default_temporal_layers.cc
@@ -104,16 +104,17 @@ size_t BufferToIndex(Vp8BufferReference 
       return 0;
     case Vp8FrameConfig::Vp8BufferReference::kGolden:
       return 1;
     case Vp8FrameConfig::Vp8BufferReference::kAltref:
       return 2;
     case Vp8FrameConfig::Vp8BufferReference::kNone:
       RTC_CHECK_NOTREACHED();
   }
+  RTC_CHECK_NOTREACHED();
 }
 
 }  // namespace
 
 constexpr size_t DefaultTemporalLayers::kNumReferenceBuffers;
 
 std::vector<DefaultTemporalLayers::DependencyInfo>
 DefaultTemporalLayers::GetDependencyInfo(size_t num_layers) {
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc b/third_party/libwebrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc
--- a/third_party/libwebrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc
+++ b/third_party/libwebrtc/modules/video_coding/codecs/vp8/temporal_layers_checker.cc
@@ -25,16 +25,17 @@ TemporalLayersChecker::CreateTemporalLay
     case Vp8TemporalLayersType::kFixedPattern:
       return std::make_unique<DefaultTemporalLayersChecker>(
           num_temporal_layers);
     case Vp8TemporalLayersType::kBitrateDynamic:
       // Conference mode temporal layering for screen content in base stream.
       return std::make_unique<TemporalLayersChecker>(num_temporal_layers);
   }
   RTC_CHECK_NOTREACHED();
+  return nullptr;
 }
 
 TemporalLayersChecker::TemporalLayersChecker(int num_temporal_layers)
     : num_temporal_layers_(num_temporal_layers),
       sequence_number_(0),
       last_sync_sequence_number_(0),
       last_tl0_sequence_number_(0) {}
 
diff --git a/third_party/libwebrtc/modules/video_coding/h26x_packet_buffer.cc b/third_party/libwebrtc/modules/video_coding/h26x_packet_buffer.cc
--- a/third_party/libwebrtc/modules/video_coding/h26x_packet_buffer.cc
+++ b/third_party/libwebrtc/modules/video_coding/h26x_packet_buffer.cc
@@ -58,16 +58,17 @@ bool BeginningOfIdr(const H26xPacketBuff
     case kH264StapA:
     case kH264SingleNalu: {
       return contains_idr_nalu;
     }
     case kH264FuA: {
       return contains_idr_nalu && IsFirstPacketOfFragment(h264_header);
     }
   }
+  return false;
 }
 
 bool HasSps(const H26xPacketBuffer::Packet& packet) {
   auto& h264_header =
       absl::get<RTPVideoHeaderH264>(packet.video_header.video_type_header);
   return absl::c_any_of(h264_header.nalus, [](const auto& nalu_info) {
     return nalu_info.type == H264::NaluType::kSps;
   });
diff --git a/third_party/libwebrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc b/third_party/libwebrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc
--- a/third_party/libwebrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc
+++ b/third_party/libwebrtc/net/dcsctp/packet/parameter/reconfiguration_response_parameter.cc
@@ -57,16 +57,17 @@ absl::string_view ToString(Reconfigurati
     case ReconfigurationResponseParameter::Result::
         kErrorRequestAlreadyInProgress:
       return "Error: request already in progress";
     case ReconfigurationResponseParameter::Result::kErrorBadSequenceNumber:
       return "Error: bad sequence number";
     case ReconfigurationResponseParameter::Result::kInProgress:
       return "In progress";
   }
+  return "Should never be reached";
 }
 
 std::optional<ReconfigurationResponseParameter>
 ReconfigurationResponseParameter::Parse(rtc::ArrayView<const uint8_t> data) {
   std::optional<BoundedByteReader<kHeaderSize>> reader = ParseTLV(data);
   if (!reader.has_value()) {
     return std::nullopt;
   }
diff --git a/third_party/libwebrtc/net/dcsctp/public/dcsctp_handover_state.cc b/third_party/libwebrtc/net/dcsctp/public/dcsctp_handover_state.cc
--- a/third_party/libwebrtc/net/dcsctp/public/dcsctp_handover_state.cc
+++ b/third_party/libwebrtc/net/dcsctp/public/dcsctp_handover_state.cc
@@ -38,16 +38,17 @@ constexpr absl::string_view HandoverUnre
       return "RETRANSMISSION_QUEUE_FAST_RECOVERY";
     case HandoverUnreadinessReason::kRetransmissionQueueNotEmpty:
       return "RETRANSMISSION_QUEUE_NOT_EMPTY";
     case HandoverUnreadinessReason::kPendingStreamReset:
       return "PENDING_STREAM_RESET";
     case HandoverUnreadinessReason::kPendingStreamResetRequest:
       return "PENDING_STREAM_RESET_REQUEST";
   }
+  return "NOTREACHED";
 }
 }  // namespace
 
 std::string HandoverReadinessStatus::ToString() const {
   std::string result;
   for (uint32_t bit = 1;
        bit <= static_cast<uint32_t>(HandoverUnreadinessReason::kMax);
        bit *= 2) {
diff --git a/third_party/libwebrtc/net/dcsctp/rx/data_tracker.cc b/third_party/libwebrtc/net/dcsctp/rx/data_tracker.cc
--- a/third_party/libwebrtc/net/dcsctp/rx/data_tracker.cc
+++ b/third_party/libwebrtc/net/dcsctp/rx/data_tracker.cc
@@ -354,16 +354,17 @@ absl::string_view DataTracker::ToString(
       return "IDLE";
     case AckState::kBecomingDelayed:
       return "BECOMING_DELAYED";
     case AckState::kDelayed:
       return "DELAYED";
     case AckState::kImmediate:
       return "IMMEDIATE";
   }
+  return "NOTREACHED";
 }
 
 HandoverReadinessStatus DataTracker::GetHandoverReadiness() const {
   HandoverReadinessStatus status;
   if (!additional_tsn_blocks_.empty()) {
     status.Add(HandoverUnreadinessReason::kDataTrackerTsnBlocksPending);
   }
   return status;
diff --git a/third_party/libwebrtc/net/dcsctp/socket/dcsctp_socket.cc b/third_party/libwebrtc/net/dcsctp/socket/dcsctp_socket.cc
--- a/third_party/libwebrtc/net/dcsctp/socket/dcsctp_socket.cc
+++ b/third_party/libwebrtc/net/dcsctp/socket/dcsctp_socket.cc
@@ -249,16 +249,17 @@ bool DcSctpSocket::IsConsistent() const 
               !t1_cookie_->is_running() && t2_shutdown_->is_running());
     case State::kShutdownReceived:
       return (tcb_ != nullptr && !t1_init_->is_running() &&
               !t1_cookie_->is_running() && !t2_shutdown_->is_running());
     case State::kShutdownAckSent:
       return (tcb_ != nullptr && !t1_init_->is_running() &&
               !t1_cookie_->is_running() && t2_shutdown_->is_running());
   }
+  return false; // notreached
 }
 
 constexpr absl::string_view DcSctpSocket::ToString(DcSctpSocket::State state) {
   switch (state) {
     case DcSctpSocket::State::kClosed:
       return "CLOSED";
     case DcSctpSocket::State::kCookieWait:
       return "COOKIE_WAIT";
@@ -270,16 +271,17 @@ constexpr absl::string_view DcSctpSocket
       return "SHUTDOWN_PENDING";
     case DcSctpSocket::State::kShutdownSent:
       return "SHUTDOWN_SENT";
     case DcSctpSocket::State::kShutdownReceived:
       return "SHUTDOWN_RECEIVED";
     case DcSctpSocket::State::kShutdownAckSent:
       return "SHUTDOWN_ACK_SENT";
   }
+  return "NOTREACHED";
 }
 
 void DcSctpSocket::SetState(State state, absl::string_view reason) {
   if (state_ != state) {
     RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Socket state changed from "
                          << ToString(state_) << " to " << ToString(state)
                          << " due to " << reason;
     state_ = state;
@@ -588,16 +590,17 @@ SocketState DcSctpSocket::state() const 
     case State::kEstablished:
       return SocketState::kConnected;
     case State::kShutdownPending:
     case State::kShutdownSent:
     case State::kShutdownReceived:
     case State::kShutdownAckSent:
       return SocketState::kShuttingDown;
   }
+  return SocketState::kShuttingDown; // notreached
 }
 
 void DcSctpSocket::SetMaxMessageSize(size_t max_message_size) {
   options_.max_message_size = max_message_size;
 }
 
 size_t DcSctpSocket::buffered_amount(StreamID stream_id) const {
   return send_queue_.buffered_amount(stream_id);
diff --git a/third_party/libwebrtc/net/dcsctp/socket/packet_sender.cc b/third_party/libwebrtc/net/dcsctp/socket/packet_sender.cc
--- a/third_party/libwebrtc/net/dcsctp/socket/packet_sender.cc
+++ b/third_party/libwebrtc/net/dcsctp/socket/packet_sender.cc
@@ -39,10 +39,11 @@ bool PacketSender::Send(SctpPacket::Buil
       return false;
     }
 
     case SendPacketStatus::kError: {
       // Nothing that can be done.
       return false;
     }
   }
+  return false; // not reached
 }
 }  // namespace dcsctp
diff --git a/third_party/libwebrtc/net/dcsctp/timer/timer.cc b/third_party/libwebrtc/net/dcsctp/timer/timer.cc
--- a/third_party/libwebrtc/net/dcsctp/timer/timer.cc
+++ b/third_party/libwebrtc/net/dcsctp/timer/timer.cc
@@ -44,16 +44,17 @@ TimeDelta GetBackoffDuration(const Timer
         if (duration > options.max_backoff_duration) {
           return options.max_backoff_duration;
         }
       }
 
       return TimeDelta(std::min(duration, Timer::kMaxTimerDuration));
     }
   }
+  return base_duration; // not reached; silences the missing-return warning
 }
 }  // namespace
 
 constexpr TimeDelta Timer::kMaxTimerDuration;
 
 Timer::Timer(TimerID id,
              absl::string_view name,
              OnExpired on_expired,
diff --git a/third_party/libwebrtc/video/adaptation/video_stream_encoder_resource_manager.cc b/third_party/libwebrtc/video/adaptation/video_stream_encoder_resource_manager.cc
--- a/third_party/libwebrtc/video/adaptation/video_stream_encoder_resource_manager.cc
+++ b/third_party/libwebrtc/video/adaptation/video_stream_encoder_resource_manager.cc
@@ -59,16 +59,17 @@ bool IsFramerateScalingEnabled(Degradati
 std::string ToString(VideoAdaptationReason reason) {
   switch (reason) {
     case VideoAdaptationReason::kQuality:
       return "quality";
     case VideoAdaptationReason::kCpu:
       return "cpu";
   }
   RTC_CHECK_NOTREACHED();
+  return "";
 }
 
 std::vector<bool> GetActiveLayersFlags(const VideoCodec& codec) {
   std::vector<bool> flags;
   if (codec.codecType == VideoCodecType::kVideoCodecVP9) {
     flags.resize(codec.VP9().numberOfSpatialLayers);
     for (size_t i = 0; i < flags.size(); ++i) {
       flags[i] = codec.spatialLayers[i].active;
diff --git a/third_party/libwebrtc/video/config/encoder_stream_factory.cc b/third_party/libwebrtc/video/config/encoder_stream_factory.cc
--- a/third_party/libwebrtc/video/config/encoder_stream_factory.cc
+++ b/third_party/libwebrtc/video/config/encoder_stream_factory.cc
@@ -104,16 +104,17 @@ int GetDefaultMaxQp(webrtc::VideoCodecTy
     case webrtc::kVideoCodecH265:
       return kDefaultVideoMaxQpH26x;
     case webrtc::kVideoCodecVP8:
     case webrtc::kVideoCodecVP9:
     case webrtc::kVideoCodecAV1:
     case webrtc::kVideoCodecGeneric:
       return kDefaultVideoMaxQpVpx;
   }
+  return kDefaultVideoMaxQpVpx; // not reached; all codec types handled above
 }
 
 // Round size to nearest simulcast-friendly size.
 // Simulcast stream width and height must both be dividable by
 // |2 ^ (simulcast_layers - 1)|.
 int NormalizeSimulcastSize(const FieldTrialsView& field_trials,
                            int size,
                            size_t simulcast_layers) {