Posted to commits@openmeetings.apache.org by so...@apache.org on 2022/12/09 09:39:05 UTC

[openmeetings] branch master updated (601be4d44 -> af8a95df1)

This is an automated email from the ASF dual-hosted git repository.

solomax pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/openmeetings.git


    from 601be4d44 [OPENMEETINGS-2732] tomcat, jettison
     new 92a8a51a6 [OPENMEETINGS-2253] RTC related JS code is simplified; deprecated kurento-utils-js is dropped
     new ffef66bcf [OPENMEETINGS-2732] tinyify, ua-parser-js, terser
     new af8a95df1 [OPENMEETINGS-2253] camera/microphone on/off no longer causes media-stream re-negotiation

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../openmeetings/db/entity/basic/Client.java       | 281 +++-------
 .../{IWsClient.java => ScreenStreamDesc.java}      |  28 +-
 .../openmeetings/db/entity/basic/StreamDesc.java   | 132 +++++
 .../db/entity/basic/WebcamStreamDesc.java          | 116 ++++
 .../apache/openmeetings/db/entity/room/Room.java   |   4 +
 .../org/apache/openmeetings/mediaserver/KRoom.java |  27 +-
 .../apache/openmeetings/mediaserver/KStream.java   |  12 +-
 .../openmeetings/mediaserver/KurentoHandler.java   |  22 +-
 .../openmeetings/mediaserver/StreamProcessor.java  | 135 ++---
 .../mediaserver/StreamProcessorActions.java        |   8 +-
 .../mediaserver/TestRecordingFlowMocked.java       |   2 +-
 openmeetings-web/src/main/front/chat/package.json  |   2 +-
 openmeetings-web/src/main/front/main/package.json  |   4 +-
 openmeetings-web/src/main/front/room/package.json  |   2 +-
 openmeetings-web/src/main/front/room/src/sharer.js |   3 +
 .../src/main/front/room/src/user-list.js           |  13 +-
 openmeetings-web/src/main/front/room/src/video.js  | 289 +++++-----
 .../src/main/front/settings/package.json           |   7 +-
 .../src/main/front/settings/src/WebRtcPeer.js      | 592 +++++++++++++++++++++
 .../src/main/front/settings/src/index.js           |   9 +-
 .../src/main/front/settings/src/mic-level.js       |  32 +-
 .../src/main/front/settings/src/settings.js        | 149 ++----
 .../src/main/front/settings/src/video-util.js      |  50 +-
 openmeetings-web/src/main/front/wb/package.json    |   2 +-
 .../web/admin/connection/KStreamDto.java           |   2 +-
 .../apache/openmeetings/web/room/RoomPanel.java    |  33 +-
 .../openmeetings/web/room/sidebar/RoomSidebar.java |   2 +-
 27 files changed, 1286 insertions(+), 672 deletions(-)
 copy openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/{IWsClient.java => ScreenStreamDesc.java} (64%)
 create mode 100644 openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/StreamDesc.java
 create mode 100644 openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/WebcamStreamDesc.java
 create mode 100644 openmeetings-web/src/main/front/settings/src/WebRtcPeer.js


[openmeetings] 03/03: [OPENMEETINGS-2253] camera/microphone on/off no longer causes media-stream re-negotiation

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

solomax pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/openmeetings.git

commit af8a95df1c4fdebe8967840517918d0c1edfe86f
Author: Maxim Solodovnik <so...@gmail.com>
AuthorDate: Fri Dec 9 16:38:55 2022 +0700

    [OPENMEETINGS-2253] camera/microphone on/off no longer causes media-stream re-negotiation
---
 .../openmeetings/db/entity/basic/Client.java       | 281 +++++----------------
 .../db/entity/basic/ScreenStreamDesc.java          |  39 +++
 .../openmeetings/db/entity/basic/StreamDesc.java   | 132 ++++++++++
 .../db/entity/basic/WebcamStreamDesc.java          | 116 +++++++++
 .../apache/openmeetings/db/entity/room/Room.java   |   4 +
 .../org/apache/openmeetings/mediaserver/KRoom.java |  27 +-
 .../apache/openmeetings/mediaserver/KStream.java   |  12 +-
 .../openmeetings/mediaserver/KurentoHandler.java   |  22 +-
 .../openmeetings/mediaserver/StreamProcessor.java  | 135 ++++------
 .../mediaserver/StreamProcessorActions.java        |   8 +-
 .../mediaserver/TestRecordingFlowMocked.java       |   2 +-
 openmeetings-web/src/main/front/room/src/sharer.js |   3 +
 .../src/main/front/room/src/user-list.js           |  13 +-
 openmeetings-web/src/main/front/room/src/video.js  |  99 +++++---
 .../src/main/front/settings/src/mic-level.js       |  28 +-
 .../src/main/front/settings/src/settings.js        |  13 +-
 .../src/main/front/settings/src/video-util.js      |  27 +-
 .../web/admin/connection/KStreamDto.java           |   2 +-
 .../apache/openmeetings/web/room/RoomPanel.java    |  33 ++-
 .../openmeetings/web/room/sidebar/RoomSidebar.java |   2 +-
 20 files changed, 573 insertions(+), 425 deletions(-)
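
For readers skimming the patch: the core of the change is that camera/microphone state now
lives on a per-stream WebcamStreamDesc (camEnabled/micEnabled flags plus a toggle() method),
so muting or unmuting flips a flag on the existing stream instead of tearing the WebRTC peer
down and renegotiating. A minimal usage sketch of the new API follows; the classes and methods
are taken from the diff below, but the scenario (and the ToggleSketch/demo wrapper) is
illustrative only and not part of the commit:

    import org.apache.openmeetings.db.entity.basic.Client;
    import org.apache.openmeetings.db.entity.basic.Client.Activity;
    import org.apache.openmeetings.db.entity.basic.Client.StreamType;
    import org.apache.openmeetings.db.entity.basic.StreamDesc;
    import org.apache.openmeetings.db.entity.basic.WebcamStreamDesc;

    public class ToggleSketch {
        static void demo(Client client) {
            // One webcam stream per client, pre-populated with the activities the client is allowed to use
            StreamDesc sd = client.addStream(StreamType.WEBCAM, Activity.AUDIO_VIDEO);
            if (sd instanceof WebcamStreamDesc cam) {
                // Muting the microphone only flips micEnabled; no media-stream re-negotiation happens
                cam.toggle(Activity.AUDIO);
                // The stream stays live as long as either the camera or the microphone is enabled
                boolean stillLive = cam.isBroadcasting();
            }
        }
    }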

diff --git a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/Client.java b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/Client.java
index e16a9657f..126ae7047 100644
--- a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/Client.java
+++ b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/Client.java
@@ -21,10 +21,7 @@ package org.apache.openmeetings.db.entity.basic;
 import static java.util.UUID.randomUUID;
 import static org.apache.openmeetings.util.OmFileHelper.SIP_USER_ID;
 
-import java.io.Serializable;
-import java.util.ArrayList;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -49,6 +46,7 @@ import com.github.openjson.JSONObject;
 public class Client implements IDataProviderEntity, IWsClient {
 	private static final long serialVersionUID = 1L;
 
+
 	public enum Activity {
 		AUDIO //sends Audio to the room
 		, VIDEO //sends Video to the room
@@ -68,7 +66,6 @@ public class Client implements IDataProviderEntity, IWsClient {
 	private final String sid;
 	private String remoteAddress;
 	private final Set<Right> rights = ConcurrentHashMap.newKeySet();
-	private final Set<Activity> activities = ConcurrentHashMap.newKeySet();
 	private final Map<String, StreamDesc> streams = new ConcurrentHashMap<>();
 	private final Date connectedSince;
 	private int cam = -1;
@@ -134,7 +131,6 @@ public class Client implements IDataProviderEntity, IWsClient {
 	}
 
 	public void clear() {
-		activities.clear();
 		rights.clear();
 		streams.clear();
 	}
@@ -165,67 +161,49 @@ public class Client implements IDataProviderEntity, IWsClient {
 		}
 	}
 
-	public void clearActivities() {
-		activities.clear();
+	public boolean isBroadcasting() {
+		return getCamStreams()
+				.anyMatch(WebcamStreamDesc::isBroadcasting);
 	}
 
-	public boolean hasAnyActivity(Activity... aa) {
-		boolean res = false;
-		if (aa != null) {
-			for (Activity a : aa) {
-				res |= activities.contains(a);
-			}
-		}
-		return res;
+	public List<Activity> getActivities() {
+		return getCamStreams()
+				.flatMap(sd -> sd.getActivities().stream())
+				.toList();
 	}
 
-	public boolean hasActivity(Activity a) {
-		return activities.contains(a);
+	public boolean has(Activity activity) {
+		return getCamStreams()
+				.flatMap(sd -> sd.getActivities().stream())
+				.anyMatch(a -> activity == a);
 	}
 
-	public Client toggle(Activity a) {
-		if (hasActivity(a)) {
-			remove(a);
-		} else {
-			set(a);
+	public boolean isAllowed(Activity a) {
+		boolean r = false;
+		if (room == null) {
+			return r;
 		}
-		return this;
-	}
-
-	public Client set(Activity a) {
-		activities.add(a);
 		switch (a) {
-			case VIDEO, AUDIO:
-				if (hasActivity(Activity.AUDIO) && hasActivity(Activity.VIDEO)) {
-					activities.add(Activity.AUDIO_VIDEO);
-				}
+			case AUDIO:
+				r = hasRight(Right.AUDIO);
 				break;
-			case AUDIO_VIDEO:
-				activities.add(Activity.AUDIO);
-				activities.add(Activity.VIDEO);
-				break;
-			default:
-		}
-		return this;
-	}
-
-	public Client remove(Activity a) {
-		activities.remove(a);
-		switch (a) {
-			case VIDEO, AUDIO:
-				activities.remove(Activity.AUDIO_VIDEO);
+			case VIDEO:
+				r = !room.isAudioOnly() && hasRight(Right.VIDEO);
 				break;
 			case AUDIO_VIDEO:
-				activities.remove(Activity.AUDIO);
-				activities.remove(Activity.VIDEO);
+				r = !room.isAudioOnly() && hasRight(Right.AUDIO) && hasRight(Right.VIDEO);
 				break;
 			default:
+				break;
 		}
-		return this;
+		return r;
 	}
 
-	public StreamDesc addStream(StreamType stype, Activity...inActivities) {
-		StreamDesc sd = new StreamDesc(stype, inActivities);
+	public StreamDesc addStream(StreamType stype, Activity toggle) {
+		StreamDesc sd = switch(stype) {
+			case SCREEN -> new ScreenStreamDesc(this);
+			case WEBCAM -> new WebcamStreamDesc(this, toggle);
+		};
 		streams.put(sd.getUid(), sd);
 		return sd;
 	}
@@ -235,31 +213,24 @@ public class Client implements IDataProviderEntity, IWsClient {
 	}
 
 	public List<StreamDesc> getStreams() {
-		return new ArrayList<>(streams.values());
+		return List.copyOf(streams.values());
 	}
 
 	public StreamDesc getStream(String inUid) {
 		return streams.get(inUid);
 	}
 
-	public Optional<StreamDesc> getScreenStream() {
+	public Optional<ScreenStreamDesc> getScreenStream() {
 		return streams.values().stream()
 				.filter(sd -> StreamType.SCREEN == sd.getType())
+				.map(sd -> (ScreenStreamDesc)sd)
 				.findFirst();
 	}
 
-	public Stream<StreamDesc> getCamStreams() {
+	public Stream<WebcamStreamDesc> getCamStreams() {
 		return streams.values().stream()
-				.filter(sd -> StreamType.WEBCAM == sd.getType());
-	}
-
-	public Client restoreActivities(StreamDesc sd) {
-		synchronized (activities) {
-			Set<Activity> aa = new HashSet<>(sd.sactivities);
-			activities.clear();
-			activities.addAll(aa);
-		}
-		return this;
+				.filter(sd -> StreamType.WEBCAM == sd.getType())
+				.map(sd -> (WebcamStreamDesc)sd);
 	}
 
 	public Date getConnectedSince() {
@@ -280,13 +251,17 @@ public class Client implements IDataProviderEntity, IWsClient {
 		return room;
 	}
 
+	public Long getRoomId() {
+		return room == null ? null : room.getId();
+	}
+
 	public Client setRoom(Room room) {
 		this.room = room;
 		return this;
 	}
 
 	public boolean isCamEnabled() {
-		return cam > -1;
+		return (room == null || !room.isAudioOnly()) && cam > -1;
 	}
 
 	public int getCam() {
@@ -312,7 +287,7 @@ public class Client implements IDataProviderEntity, IWsClient {
 	}
 
 	public int getWidth() {
-		return width;
+		return room != null && room.isInterview() ? 320 : width;
 	}
 
 	public Client setWidth(int width) {
@@ -321,7 +296,7 @@ public class Client implements IDataProviderEntity, IWsClient {
 	}
 
 	public int getHeight() {
-		return height;
+		return room != null && room.isInterview() ? 260 : height;
 	}
 
 	public Client setHeight(int height) {
@@ -346,11 +321,8 @@ public class Client implements IDataProviderEntity, IWsClient {
 		this.serverId = serverId;
 	}
 
-	public Long getRoomId() {
-		return room == null ? null : room.getId();
-	}
-
-	private JSONObject addUserJson(JSONObject o) {
+	// package private for StreamDesc
+	JSONObject addUserJson(JSONObject o) {
 		JSONObject u = new JSONObject();
 		if (user != null) {
 			JSONObject a = new JSONObject();
@@ -371,27 +343,35 @@ public class Client implements IDataProviderEntity, IWsClient {
 				.put("level", hasRight(Right.MODERATOR) ? 5 : (hasRight(Right.WHITEBOARD) ? 3 : 1));
 	}
 
+	// package private for StreamDesc
+	JSONObject addCamMic(boolean self, JSONObject json) {
+		if (self) {
+			json.put("cam", cam).put("mic", mic);
+		}
+		return json;
+	}
+
 	public JSONObject toJson(boolean self) {
 		JSONArray streamArr = new JSONArray();
 		for (Entry<String, StreamDesc> e : streams.entrySet()) {
-			streamArr.put(e.getValue().toJson());
+			streamArr.put(e.getValue().toJson(self));
 		}
 		JSONObject json = new JSONObject()
 				.put("cuid", uid)
 				.put("uid", uid)
 				.put("rights", new JSONArray(rights))
-				.put("activities", new JSONArray(activities))
+				.put("activities", new JSONArray(getActivities()))
 				.put("streams", streamArr)
-				.put("width", width)
-				.put("height", height)
+				.put("width", getWidth())
+				.put("height", getHeight())
 				.put("self", self);
-		if (self) {
-			json.put("cam", cam).put("mic", mic);
-		}
-		return addUserJson(json);
+		return addUserJson(addCamMic(self, json));
 	}
 
 	public void merge(Client c) {
+		if (c == this) {
+			return;
+		}
 		user = c.user;
 		room = c.room;
 		synchronized (rights) {
@@ -399,17 +379,15 @@ public class Client implements IDataProviderEntity, IWsClient {
 			rights.clear();
 			rights.addAll(rr);
 		}
-		synchronized (activities) {
-			Set<Activity> aa = new HashSet<>(c.activities);
-			activities.clear();
-			activities.addAll(aa);
-		}
 		synchronized (streams) {
-			Map<String, StreamDesc> ss = new HashMap<>(c.streams);
 			streams.clear();
-			for (Entry<String, StreamDesc> e : ss.entrySet()) {
-				streams.put(e.getKey(), new StreamDesc(e.getValue()));
-			}
+			c.streams.values().stream()
+				.map(sd ->
+					switch (sd.getType()) {
+						case SCREEN -> new ScreenStreamDesc((ScreenStreamDesc)sd);
+						case WEBCAM -> new WebcamStreamDesc((WebcamStreamDesc)sd);
+					}
+				).forEach(sd -> streams.put(sd.getUid(), sd));
 		}
 		cam = c.cam;
 		mic = c.mic;
@@ -450,131 +428,6 @@ public class Client implements IDataProviderEntity, IWsClient {
 	@Override
 	public String toString() {
 		return "Client [uid=" + uid + ", sessionId=" + sessionId + ", pageId=" + pageId + ", userId=" + getUserId() + ", room=" + getRoomId()
-				+ ", rights=" + rights + ", sactivities=" + activities + ", connectedSince=" + connectedSince + "]";
-	}
-
-	public class StreamDesc implements Serializable {
-		private static final long serialVersionUID = 1L;
-		private final Set<Activity> sactivities = ConcurrentHashMap.newKeySet();
-		private final String uuid;
-		private final StreamType type;
-		private int swidth;
-		private int sheight;
-
-		public StreamDesc(StreamDesc sd) {
-			this.uuid = sd.uuid;
-			this.type = sd.type;
-			this.swidth = sd.swidth;
-			this.sheight = sd.sheight;
-			sactivities.addAll(sd.sactivities);
-		}
-
-		public StreamDesc(StreamType type, Activity...activities) {
-			this.uuid = randomUUID().toString();
-			this.type = type;
-			if (activities == null || activities.length == 0) {
-				setActivities();
-			} else {
-				sactivities.addAll(List.of(activities));
-			}
-			if (StreamType.SCREEN == type) {
-				this.swidth = 800;
-				this.sheight = 600;
-			} else if (StreamType.WEBCAM == type) {
-				boolean interview = room != null && Room.Type.INTERVIEW == room.getType();
-				this.swidth = interview ? 320 : width;
-				this.sheight = interview ? 260 : height;
-			}
-		}
-
-		public String getSid() {
-			return sid;
-		}
-
-		public String getUid() {
-			return uuid;
-		}
-
-		public StreamType getType() {
-			return type;
-		}
-
-		public int getWidth() {
-			return swidth;
-		}
-
-		public StreamDesc setWidth(int width) {
-			this.swidth = width;
-			return this;
-		}
-
-		public int getHeight() {
-			return sheight;
-		}
-
-		public StreamDesc setHeight(int height) {
-			this.sheight = height;
-			return this;
-		}
-
-		public StreamDesc setActivities() {
-			sactivities.clear();
-			if (StreamType.WEBCAM == type) {
-				if (Client.this.hasActivity(Activity.AUDIO)) {
-					sactivities.add(Activity.AUDIO);
-				}
-				if (Client.this.hasActivity(Activity.VIDEO)) {
-					sactivities.add(Activity.VIDEO);
-				}
-			}
-			if (StreamType.SCREEN == type) {
-				sactivities.add(Activity.SCREEN);
-			}
-			return this;
-		}
-
-		public boolean hasActivity(Activity a) {
-			return sactivities.contains(a);
-		}
-
-		public void addActivity(Activity a) {
-			sactivities.add(a);
-		}
-
-		public StreamDesc removeActivity(Activity a) {
-			sactivities.remove(a);
-			return this;
-		}
-
-		public Client getClient() {
-			return Client.this;
-		}
-
-		public List<Activity> getActivities() {
-			return List.copyOf(sactivities);
-		}
-
-		public JSONObject toJson() {
-			return toJson(false);
-		}
-
-		public JSONObject toJson(boolean self) {
-			JSONObject o = new JSONObject()
-					.put("uid", uuid)
-					.put("type", type.name())
-					.put("width", swidth)
-					.put("height", sheight)
-					.put("activities", new JSONArray(sactivities))
-					.put("cuid", uid);
-			if (self) {
-				o.put("cam", cam).put("mic", mic);
-			}
-			return addUserJson(o);
-		}
-
-		@Override
-		public String toString() {
-			return String.format("Stream[uid=%s,type=%s,activities=%s]", uid, type, sactivities);
-		}
+				+ ", rights=" + rights + ", activities=" + getActivities() + ", connectedSince=" + connectedSince + "]";
 	}
 }
diff --git a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/ScreenStreamDesc.java b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/ScreenStreamDesc.java
new file mode 100644
index 000000000..1562345a6
--- /dev/null
+++ b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/ScreenStreamDesc.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.openmeetings.db.entity.basic;
+
+import org.apache.openmeetings.db.entity.basic.Client.Activity;
+import org.apache.openmeetings.db.entity.basic.Client.StreamType;
+
+public class ScreenStreamDesc extends StreamDesc {
+	public ScreenStreamDesc(ScreenStreamDesc sd) {
+		super(sd);
+	}
+
+	public ScreenStreamDesc(final Client client) {
+		super(client, StreamType.SCREEN);
+		setWidth(800);
+		setHeight(600);
+	}
+
+	@Override
+	protected boolean allowed(Activity a) {
+		return Activity.SCREEN == a || Activity.RECORD == a;
+	}
+}
diff --git a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/StreamDesc.java b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/StreamDesc.java
new file mode 100644
index 000000000..9508bb208
--- /dev/null
+++ b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/StreamDesc.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.openmeetings.db.entity.basic;
+
+import static java.util.UUID.randomUUID;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.openmeetings.db.entity.basic.Client.Activity;
+import org.apache.openmeetings.db.entity.basic.Client.StreamType;
+
+import com.github.openjson.JSONArray;
+import com.github.openjson.JSONObject;
+
+public abstract class StreamDesc implements Serializable, Cloneable {
+	private static final long serialVersionUID = 1L;
+	protected final Set<Activity> activities = ConcurrentHashMap.newKeySet();
+	private final Client client;
+	private final String uid;
+	private final StreamType type;
+	private int width;
+	private int height;
+
+	public StreamDesc(StreamDesc sd) {
+		this.client = sd.client;
+		this.uid = sd.uid;
+		this.type = sd.type;
+		this.width = sd.width;
+		this.height = sd.height;
+		this.activities.addAll(sd.activities);
+	}
+
+	public StreamDesc(final Client client, StreamType type) {
+		this.client = client;
+		this.uid = randomUUID().toString();
+		this.type = type;
+	}
+
+	protected abstract boolean allowed(Activity a);
+
+	public boolean has(Activity a) {
+		return activities.contains(a);
+	}
+
+	public void add(Activity a) {
+		if (allowed(a)) {
+			activities.add(a);
+		}
+	}
+
+	public StreamDesc remove(Activity a) {
+		activities.remove(a);
+		return this;
+	}
+
+	public List<Activity> getActivities() {
+		return List.copyOf(activities);
+	}
+
+	public Client getClient() {
+		return client;
+	}
+
+	public String getSid() {
+		return client.getSid();
+	}
+
+	public String getUid() {
+		return uid;
+	}
+
+	public StreamType getType() {
+		return type;
+	}
+
+	public int getWidth() {
+		return width;
+	}
+
+	public StreamDesc setWidth(int width) {
+		this.width = width;
+		return this;
+	}
+
+	public int getHeight() {
+		return height;
+	}
+
+	public StreamDesc setHeight(int height) {
+		this.height = height;
+		return this;
+	}
+
+	@Override
+	public String toString() {
+		return String.format("Stream[uid=%s,type=%s,activities=%s]", uid, type, activities);
+	}
+
+	public JSONObject toJson() {
+		return toJson(false);
+	}
+
+	public JSONObject toJson(boolean self) {
+		JSONObject o = new JSONObject()
+				.put("uid", uid)
+				.put("type", type.name())
+				.put("width", width)
+				.put("height", height)
+				.put("activities", new JSONArray(activities))
+				.put("cuid", client.getUid());
+		return client.addUserJson(client.addCamMic(self, o));
+	}
+}
diff --git a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/WebcamStreamDesc.java b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/WebcamStreamDesc.java
new file mode 100644
index 000000000..18288b5b9
--- /dev/null
+++ b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/basic/WebcamStreamDesc.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.openmeetings.db.entity.basic;
+
+import org.apache.openmeetings.db.entity.basic.Client.Activity;
+import org.apache.openmeetings.db.entity.basic.Client.StreamType;
+
+import com.github.openjson.JSONObject;
+
+public class WebcamStreamDesc extends StreamDesc {
+	private boolean camEnabled = false;
+	private boolean micEnabled = false;
+
+	public WebcamStreamDesc(WebcamStreamDesc sd) {
+		super(sd);
+		this.camEnabled = sd.camEnabled;
+		this.micEnabled = sd.micEnabled;
+	}
+
+	public WebcamStreamDesc(final Client client, Activity toggle) {
+		super(client, StreamType.WEBCAM);
+		setWidth(client.getWidth());
+		setHeight(client.getHeight());
+		// we will add all allowed activities here
+		if (client.isAllowed(Activity.AUDIO)) {
+			activities.add(Activity.AUDIO);
+		}
+		if (client.isAllowed(Activity.VIDEO)) {
+			activities.add(Activity.VIDEO);
+		}
+		if (has(Activity.AUDIO) && has(Activity.VIDEO)) {
+			activities.add(Activity.AUDIO_VIDEO);
+		}
+		switch (toggle) {
+			case AUDIO:
+				if (has(toggle)) {
+					micEnabled = true;
+				}
+				break;
+			case VIDEO:
+				if (has(toggle)) {
+					camEnabled = true;
+				}
+				break;
+			case AUDIO_VIDEO:
+				if (has(toggle)) {
+					micEnabled = true;
+					camEnabled = true;
+				}
+				break;
+			default:
+		}
+	}
+
+	@Override
+	public StreamDesc remove(Activity a) {
+		super.remove(a);
+		switch (a) {
+			case AUDIO:
+				micEnabled = false;
+				break;
+			case VIDEO:
+				camEnabled = false;
+				break;
+			case AUDIO_VIDEO:
+				micEnabled = false;
+				camEnabled = false;
+				break;
+			default:
+		}
+		return this;
+	}
+
+	public void toggle(Activity toggle) {
+		switch (toggle) {
+			case AUDIO:
+				micEnabled = !micEnabled;
+				break;
+			case VIDEO:
+				camEnabled = !camEnabled;
+				break;
+			default:
+		};
+	}
+
+	public boolean isBroadcasting() {
+		return !activities.isEmpty() && (camEnabled || micEnabled);
+	}
+
+	@Override
+	protected boolean allowed(Activity a) {
+		return Activity.AUDIO == a || Activity.VIDEO == a || Activity.AUDIO_VIDEO == a;
+	}
+
+	@Override
+	public JSONObject toJson(boolean self) {
+		return super.toJson(self)
+				.put("camEnabled", camEnabled).put("micEnabled", micEnabled);
+	}
+}
diff --git a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/room/Room.java b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/room/Room.java
index 9dbdf7728..09dd3ebf7 100644
--- a/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/room/Room.java
+++ b/openmeetings-db/src/main/java/org/apache/openmeetings/db/entity/room/Room.java
@@ -377,6 +377,10 @@ public class Room extends HistoricalEntity {
 		this.type = type;
 	}
 
+	public boolean isInterview() {
+		return Type.INTERVIEW == type;
+	}
+
 	public boolean getIspublic() {
 		return ispublic;
 	}
diff --git a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KRoom.java b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KRoom.java
index c91281f8b..810c938b1 100644
--- a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KRoom.java
+++ b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KRoom.java
@@ -34,8 +34,9 @@ import org.apache.openmeetings.IApplication;
 import org.apache.openmeetings.core.util.WebSocketHelper;
 import org.apache.openmeetings.db.dao.record.RecordingDao;
 import org.apache.openmeetings.db.entity.basic.Client;
+import org.apache.openmeetings.db.entity.basic.ScreenStreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.StreamType;
 import org.apache.openmeetings.db.entity.file.BaseFileItem;
 import org.apache.openmeetings.db.entity.record.Recording;
@@ -119,7 +120,6 @@ public class KRoom {
 
 			log.debug("##REC:: recording in room {} is starting ::", room.getId());
 			Room r = c.getRoom();
-			boolean interview = Room.Type.INTERVIEW == r.getType();
 
 			Date now = new Date();
 
@@ -127,7 +127,7 @@ public class KRoom {
 
 			rec.setHash(randomUUID().toString());
 			final FastDateFormat fdf = FormatHelper.getDateTimeFormat(c.getUser());
-			rec.setName(app.getOmString(interview ? "file.name.interview" : "file.name.recording", c.getUser().getLanguageId())
+			rec.setName(app.getOmString(r.isInterview() ? "file.name.interview" : "file.name.recording", c.getUser().getLanguageId())
 					+ fdf.format(new Date()));
 			User u = c.getUser();
 			recordingUser.put("login", u.getLogin());
@@ -137,7 +137,7 @@ public class KRoom {
 			Long ownerId = User.Type.CONTACT == u.getType() ? u.getOwnerId() : u.getId();
 			rec.setInsertedBy(ownerId);
 			rec.setType(BaseFileItem.Type.RECORDING);
-			rec.setInterview(interview);
+			rec.setInterview(r.isInterview());
 
 			rec.setRoomId(room.getId());
 			rec.setRecordStart(now);
@@ -146,9 +146,9 @@ public class KRoom {
 			rec.setStatus(Recording.Status.RECORDING);
 			log.debug("##REC:: recording created by USER: {}", ownerId);
 
-			Optional<StreamDesc> osd = c.getScreenStream();
+			Optional<ScreenStreamDesc> osd = c.getScreenStream();
 			if (osd.isPresent()) {
-				osd.get().addActivity(Activity.RECORD);
+				osd.get().add(Activity.RECORD);
 				cm.update(c);
 				rec.setWidth(osd.get().getWidth());
 				rec.setHeight(osd.get().getHeight());
@@ -180,9 +180,9 @@ public class KRoom {
 				u = new User();
 			} else {
 				u = c.getUser();
-				Optional<StreamDesc> osd = c.getScreenStream();
+				Optional<ScreenStreamDesc> osd = c.getScreenStream();
 				if (osd.isPresent()) {
-					osd.get().removeActivity(Activity.RECORD);
+					osd.get().remove(Activity.RECORD);
 					cm.update(c);
 					kHandler.sendShareUpdated(osd.get());
 				}
@@ -208,11 +208,11 @@ public class KRoom {
 		return new JSONObject(sharingUser.toString());
 	}
 
-	public void startSharing(Client c, Optional<StreamDesc> osd, JSONObject msg, Activity a) {
-		StreamDesc sd;
+	public void startSharing(Client c, Optional<ScreenStreamDesc> osd, JSONObject msg, Activity a) {
+		ScreenStreamDesc sd;
 		if (sharingStarted.compareAndSet(false, true)) {
 			sharingUser.put("sid", c.getSid());
-			sd = c.addStream(StreamType.SCREEN, a);
+			sd = (ScreenStreamDesc)c.addStream(StreamType.SCREEN, a);
 			cm.update(c);
 			log.debug("Stream.UID {}: sharing has been started, activity: {}", sd.getUid(), a);
 			kHandler.sendClient(sd.getSid(), newKurentoMsg()
@@ -221,9 +221,9 @@ public class KRoom {
 							.put("shareType", msg.getString("shareType"))
 							.put("fps", msg.getString("fps")))
 					.put(PARAM_ICE, kHandler.getTurnServers(c)));
-		} else if (osd.isPresent() && !osd.get().hasActivity(a)) {
+		} else if (osd.isPresent() && !osd.get().has(a)) {
 			sd = osd.get();
-			sd.addActivity(a);
+			sd.add(a);
 			cm.update(c);
 			kHandler.sendShareUpdated(sd);
 			WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
@@ -255,7 +255,6 @@ public class KRoom {
 					.ifPresent(c -> {
 						StreamDesc sd = c.addStream(StreamType.WEBCAM, Activity.AUDIO);
 						sd.setWidth(120).setHeight(90);
-						c.restoreActivities(sd);
 						KStream stream = join(sd);
 						stream.startBroadcast(sd, "", () -> {});
 						cm.update(c);
diff --git a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KStream.java b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KStream.java
index 092d83602..97f7da622 100644
--- a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KStream.java
+++ b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KStream.java
@@ -52,7 +52,7 @@ import org.apache.openmeetings.core.util.WebSocketHelper;
 import org.apache.openmeetings.db.dao.record.RecordingChunkDao;
 import org.apache.openmeetings.db.entity.basic.Client;
 import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.StreamType;
 import org.apache.openmeetings.db.entity.record.RecordingChunk.Type;
 import org.apache.openmeetings.db.util.ws.RoomMessage;
@@ -123,9 +123,9 @@ public class KStream extends AbstractStream implements ISipCallbacks {
 		if (outgoingMedia != null) {
 			release(false);
 		}
-		hasAudio = sd.hasActivity(Activity.AUDIO);
-		hasVideo = sd.hasActivity(Activity.VIDEO);
-		hasScreen = sd.hasActivity(Activity.SCREEN);
+		hasAudio = sd.has(Activity.AUDIO);
+		hasVideo = sd.has(Activity.VIDEO);
+		hasScreen = sd.has(Activity.SCREEN);
 		sipClient = OmFileHelper.SIP_USER_ID.equals(sd.getClient().getUserId());
 		if ((sdpOffer.indexOf("m=audio") > -1 && !hasAudio)
 				|| (sdpOffer.indexOf("m=video") > -1 && !hasVideo && StreamType.SCREEN != streamType))
@@ -295,10 +295,10 @@ public class KStream extends AbstractStream implements ISipCallbacks {
 			if (sd == null) {
 				log.warn("Stream for endpoint dooesn't exists");
 			} else {
-				if (sd.hasActivity(Activity.AUDIO)) {
+				if (sd.has(Activity.AUDIO)) {
 					outgoingMedia.connect(listener, MediaType.AUDIO);
 				}
-				if (StreamType.SCREEN == streamType || sd.hasActivity(Activity.VIDEO)) {
+				if (StreamType.SCREEN == streamType || sd.has(Activity.VIDEO)) {
 					outgoingMedia.connect(listener, MediaType.VIDEO);
 				}
 			}
diff --git a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KurentoHandler.java b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KurentoHandler.java
index 13b01bf2f..0f6fe7876 100644
--- a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KurentoHandler.java
+++ b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/KurentoHandler.java
@@ -44,11 +44,9 @@ import javax.crypto.spec.SecretKeySpec;
 import org.apache.openmeetings.core.util.WebSocketHelper;
 import org.apache.openmeetings.db.dao.room.RoomDao;
 import org.apache.openmeetings.db.entity.basic.Client;
-import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
 import org.apache.openmeetings.db.entity.basic.IWsClient;
 import org.apache.openmeetings.db.entity.room.Room;
-import org.apache.openmeetings.db.entity.room.Room.Right;
 import org.apache.openmeetings.db.entity.user.User;
 import org.apache.openmeetings.db.manager.IClientManager;
 import org.apache.openmeetings.db.util.ws.RoomMessage;
@@ -320,24 +318,6 @@ public class KurentoHandler {
 		return new JSONObject().put("type", KURENTO_TYPE);
 	}
 
-	public static boolean activityAllowed(Client c, Activity a, Room room) {
-		boolean r = false;
-		switch (a) {
-			case AUDIO:
-				r = c.hasRight(Right.AUDIO);
-				break;
-			case VIDEO:
-				r = !room.isAudioOnly() && c.hasRight(Right.VIDEO);
-				break;
-			case AUDIO_VIDEO:
-				r = !room.isAudioOnly() && c.hasRight(Right.AUDIO) && c.hasRight(Right.VIDEO);
-				break;
-			default:
-				break;
-		}
-		return r;
-	}
-
 	public JSONArray getTurnServers(Client c) {
 		return getTurnServers(c, false);
 	}
diff --git a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessor.java b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessor.java
index 0c261ae4a..4738d69b1 100644
--- a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessor.java
+++ b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessor.java
@@ -20,16 +20,12 @@
 package org.apache.openmeetings.mediaserver;
 
 import static org.apache.openmeetings.mediaserver.KurentoHandler.PARAM_ICE;
-import static org.apache.openmeetings.mediaserver.KurentoHandler.activityAllowed;
 import static org.apache.openmeetings.mediaserver.KurentoHandler.newKurentoMsg;
 import static org.apache.openmeetings.util.OpenmeetingsVariables.isRecordingsEnabled;
 
 import java.util.Collection;
-import java.util.HashSet;
-import java.util.List;
 import java.util.Map;
 import java.util.Optional;
-import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Stream;
 
@@ -39,8 +35,10 @@ import org.apache.openmeetings.core.converter.RecordingConverter;
 import org.apache.openmeetings.core.util.WebSocketHelper;
 import org.apache.openmeetings.db.dao.record.RecordingDao;
 import org.apache.openmeetings.db.entity.basic.Client;
+import org.apache.openmeetings.db.entity.basic.ScreenStreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.WebcamStreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.StreamType;
 import org.apache.openmeetings.db.entity.record.Recording;
 import org.apache.openmeetings.db.entity.room.Room;
@@ -56,7 +54,6 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.task.TaskExecutor;
 import org.springframework.stereotype.Component;
 
-import com.github.openjson.JSONArray;
 import com.github.openjson.JSONObject;
 
 @Component
@@ -85,7 +82,6 @@ public class StreamProcessor implements IStreamProcessor {
 	@TimedApplication
 	void onMessage(Client c, final String cmdId, JSONObject msg) {
 		final String uid = msg.optString("uid");
-		Optional<StreamDesc> osd;
 		log.debug("Incoming message from user with ID '{}': {}", c.getUserId(), msg);
 		switch (cmdId) {
 			case "devicesAltered":
@@ -106,11 +102,12 @@ public class StreamProcessor implements IStreamProcessor {
 			case "addListener":
 				streamProcessorActions.addListener(c, msg);
 				break;
-			case "wannaShare":
-				osd = c.getScreenStream();
-				if (screenShareAllowed(c) || (osd.isPresent() && !osd.get().hasActivity(Activity.SCREEN))) {
+			case "wannaShare": {
+				Optional<ScreenStreamDesc> osd = c.getScreenStream();
+				if (screenShareAllowed(c) || (osd.isPresent() && !osd.get().has(Activity.SCREEN))) {
 					startSharing(c, osd, msg, Activity.SCREEN);
 				}
+			}
 				break;
 			case "wannaRecord":
 				onWannaRecord(c, msg);
@@ -133,22 +130,21 @@ public class StreamProcessor implements IStreamProcessor {
 	private void onDeviceAltered(Client c, String uid, JSONObject msg) {
 		StreamDesc sd = c.getStream(uid);
 		if (sd != null) {
-			if (!msg.getBoolean("audio") && c.hasActivity(Activity.AUDIO)) {
-				c.remove(Activity.AUDIO);
+			if (!msg.getBoolean("audio") && sd.has(Activity.AUDIO)) {
+				sd.remove(Activity.AUDIO);
 			}
-			if (!msg.getBoolean("video") && c.hasActivity(Activity.VIDEO)) {
-				c.remove(Activity.VIDEO);
+			if (!msg.getBoolean("video") && sd.has(Activity.VIDEO)) {
+				sd.remove(Activity.VIDEO);
 			}
-			sd.setActivities();
 			WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), cm.update(c), RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
 		}
 	}
 
 	private void onWannaRecord(Client c, JSONObject msg) {
-		Optional<StreamDesc> osd = c.getScreenStream();
+		Optional<ScreenStreamDesc> osd = c.getScreenStream();
 		if (recordingAllowed(c)) {
 			Room r = c.getRoom();
-			if (Room.Type.INTERVIEW == r.getType()) {
+			if (r.isInterview()) {
 				log.warn("This shouldn't be called for interview room");
 				return;
 			}
@@ -174,25 +170,6 @@ public class StreamProcessor implements IStreamProcessor {
 		stream.startBroadcast(sd, sdpOffer, then);
 	}
 
-	private static boolean isBroadcasting(final Client c) {
-		return c.hasAnyActivity(Activity.AUDIO, Activity.VIDEO);
-	}
-
-	private Set<String> cleanWebCams(Client c, List<StreamDesc> streams) {
-		Set<String> closed = new HashSet<>();
-		streams.stream()
-			.filter(lsd -> StreamType.WEBCAM == lsd.getType())
-			.forEach(lsd -> {
-				KStream s = getByUid(lsd.getUid());
-				if (s != null) {
-					s.stopBroadcast();
-				}
-				c.removeStream(lsd.getUid());
-				closed.add(lsd.getUid());
-			});
-		return closed;
-	}
-
 	@TimedApplication
 	public void onToggleActivity(Client c, Activity a) {
 		log.info("PARTICIPANT {}: trying to toggle activity {}", c, a);
@@ -200,8 +177,7 @@ public class StreamProcessor implements IStreamProcessor {
 			return;
 		}
 
-		if (activityAllowed(c, a, c.getRoom())) {
-			boolean wasBroadcasting = isBroadcasting(c);
+		if (c.isAllowed(a)) {
 			if (a == Activity.AUDIO && !c.isMicEnabled()) {
 				return;
 			}
@@ -211,58 +187,42 @@ public class StreamProcessor implements IStreamProcessor {
 			if (a == Activity.AUDIO_VIDEO && !c.isMicEnabled() && !c.isCamEnabled()) {
 				return;
 			}
-			c.toggle(a);
-			List<StreamDesc> streams = c.getStreams();
-			if (!isBroadcasting(c)) {
-				Set<String> closed = cleanWebCams(c, streams);
-				if (!closed.isEmpty()) {
-					cm.update(c);
-					checkStreams(c.getRoomId());
-					WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
+			Optional<WebcamStreamDesc> cam = c.getCamStreams().findFirst();
+			if (cam.isPresent()) {
+				WebcamStreamDesc camStr = cam.get();
+				camStr.toggle(a);
+				if (!camStr.isBroadcasting()) {
+					KStream s = getByUid(camStr.getUid());
+					if (s != null) {
+						s.stopBroadcast();
+					}
+					c.removeStream(camStr.getUid());
 				}
+				cm.update(c);
+				WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
 			} else {
-				StreamDesc sd = c.addStream(StreamType.WEBCAM);
-				Set<String> closed = wasBroadcasting ? cleanWebCams(c, streams) : Set.of();
-				cm.update(c.restoreActivities(sd));
+				StreamDesc sd = c.addStream(StreamType.WEBCAM, a);
 				log.debug("User {}: has started broadcast", sd.getUid());
 				kHandler.sendClient(sd.getSid(), newKurentoMsg()
 						.put("id", "broadcast")
 						.put("stream", sd.toJson(true))
-						.put("cleanup", new JSONArray(closed))
 						.put(PARAM_ICE, kHandler.getTurnServers(c, false)));
 			}
 		}
 	}
 
-	private void constraintsChanged(Client c) {
-		//constraints were changed
-		c.getStreams().stream()
-			.filter(sd -> StreamType.WEBCAM == sd.getType())
-			.findFirst()
-			.ifPresent(sd -> {
-				sd.setActivities();
-				cm.update(c);
-			});
-	}
-
 	public void rightsUpdated(Client c) {
-		Optional<StreamDesc> osd = c.getScreenStream();
+		Optional<ScreenStreamDesc> osd = c.getScreenStream();
 		if (osd.isPresent() && !hasRightsToShare(c)) {
 			stopSharing(c, osd.get().getUid());
 		}
-		if (isBroadcasting(c)) {
-			constraintsChanged(c);
-		} else {
-			c.getStreams().stream()
-				.filter(sd -> StreamType.WEBCAM == sd.getType())
-				.forEach(sd -> {
-					KStream stream = streamByUid.get(sd.getUid());
-					if (stream != null) {
-						KRoom room = kHandler.getRoom(c.getRoomId());
-						room.onStopBroadcast(stream);
-					}
-				});
-		}
+		c.getCamStreams()
+			.filter(sd -> !sd.isBroadcasting())
+			.map(sd -> streamByUid.get(sd.getUid()))
+			.forEach(stream -> {
+				KRoom room = kHandler.getRoom(c.getRoomId());
+				room.onStopBroadcast(stream);
+			});
 		WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
 	}
 
@@ -279,7 +239,7 @@ public class StreamProcessor implements IStreamProcessor {
 		{
 			log.info("No more screen streams in the room, stopping sharing");
 			kRoom.stopSharing();
-			if (Room.Type.INTERVIEW != kRoom.getRoom().getType() && kRoom.isRecording()) {
+			if (!kRoom.getRoom().isInterview() && kRoom.isRecording()) {
 				log.info("No more screen streams in the non-interview room, stopping recording");
 				kRoom.stopRecording(null);
 			}
@@ -300,7 +260,7 @@ public class StreamProcessor implements IStreamProcessor {
 			return false;
 		}
 		Room r = c.getRoom();
-		return r != null && Room.Type.INTERVIEW != r.getType()
+		return r != null && !r.isInterview()
 				&& !r.isHidden(RoomElement.SCREEN_SHARING)
 				&& c.hasRight(Right.SHARE);
 	}
@@ -318,7 +278,7 @@ public class StreamProcessor implements IStreamProcessor {
 		if (!room.isSharing() || !c.getSid().equals(room.getSharingUser().getString("sid"))) {
 			return;
 		}
-		Optional<StreamDesc> osd = c.getScreenStream();
+		Optional<ScreenStreamDesc> osd = c.getScreenStream();
 		if (osd.isPresent()) {
 			stopSharing(c, osd.get().getUid());
 		} else {
@@ -327,7 +287,7 @@ public class StreamProcessor implements IStreamProcessor {
 		stopRecording(c);
 	}
 
-	private void startSharing(Client c, Optional<StreamDesc> osd, JSONObject msg, Activity a) {
+	private void startSharing(Client c, Optional<ScreenStreamDesc> osd, JSONObject msg, Activity a) {
 		if (kHandler.isConnected() && c.getRoomId() != null) {
 			kHandler.getRoom(c.getRoomId()).startSharing(c, osd, msg, a);
 		}
@@ -349,8 +309,8 @@ public class StreamProcessor implements IStreamProcessor {
 			return;
 		}
 		if (isRecording(c.getRoomId())) {
-			StreamDesc sd = c.getStream(uid);
-			sd.removeActivity(Activity.SCREEN);
+			ScreenStreamDesc sd = (ScreenStreamDesc)c.getStream(uid);
+			sd.remove(Activity.SCREEN);
 			cm.update(c);
 			KStream sender = getByUid(uid);
 			sender.pauseSharing();
@@ -379,14 +339,12 @@ public class StreamProcessor implements IStreamProcessor {
 		StreamDesc sd = null;
 		if (c.getRoomId() != null) {
 			sd = c.getStream(uid);
-			if (sd != null && StreamType.SCREEN == sd.getType()) {
+			if (sd instanceof ScreenStreamDesc scr) {
 				c.removeStream(uid);
 				cm.update(c);
 				checkStreams(c.getRoomId());
 				WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
-				kHandler.sendShareUpdated(sd
-						.removeActivity(Activity.SCREEN)
-						.removeActivity(Activity.RECORD));
+				kHandler.sendShareUpdated(scr.remove(Activity.SCREEN).remove(Activity.RECORD));
 			}
 		}
 		return sd;
@@ -429,7 +387,7 @@ public class StreamProcessor implements IStreamProcessor {
 
 		// In case this user wasn't shareing his screen we also need to close that one
 		c.getScreenStream().ifPresent(sd -> {
-			if (!sd.hasActivity(Activity.SCREEN)) {
+			if (!sd.has(Activity.SCREEN)) {
 				pauseSharing(c, sd.getUid());
 			}
 		});
@@ -516,11 +474,6 @@ public class StreamProcessor implements IStreamProcessor {
 			StreamDesc sd = c.getStream(uid);
 			if (sd != null) {
 				c.removeStream(uid);
-				if (StreamType.WEBCAM == sd.getType()) {
-					for (Activity a : sd.getActivities()) {
-						c.remove(a);
-					}
-				}
 				cm.update(c);
 				WebSocketHelper.sendRoom(new TextRoomMessage(c.getRoomId(), c, RoomMessage.Type.RIGHT_UPDATED, c.getUid()));
 			}
diff --git a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessorActions.java b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessorActions.java
index 787e9e08b..eafe4cae4 100644
--- a/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessorActions.java
+++ b/openmeetings-mediaserver/src/main/java/org/apache/openmeetings/mediaserver/StreamProcessorActions.java
@@ -24,9 +24,9 @@ import static org.apache.openmeetings.mediaserver.KurentoHandler.sendError;
 
 import org.apache.openmeetings.core.util.WebSocketHelper;
 import org.apache.openmeetings.db.entity.basic.Client;
+import org.apache.openmeetings.db.entity.basic.ScreenStreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
 import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
-import org.apache.openmeetings.db.entity.basic.Client.StreamType;
 import org.apache.openmeetings.db.manager.IClientManager;
 import org.apache.openmeetings.util.logging.TimedApplication;
 import org.apache.wicket.util.string.Strings;
@@ -79,7 +79,7 @@ public class StreamProcessorActions {
 			if (sd == null) {
 				return;
 			}
-			if (StreamType.SCREEN == sd.getType() && sd.hasActivity(Activity.RECORD) && !sd.hasActivity(Activity.SCREEN)) {
+			if (sd instanceof ScreenStreamDesc scr && scr.has(Activity.RECORD) && !scr.has(Activity.SCREEN)) {
 				return;
 			}
 			sender.addListener(c.getSid(), c.getUid(), msg.getString("sdpOffer"));
@@ -114,7 +114,7 @@ public class StreamProcessorActions {
 				cm.update(c);
 			}
 			streamProcessor.startBroadcast(sender, sd, msg.getString("sdpOffer"), () -> {
-				if (StreamType.SCREEN == sd.getType() && sd.hasActivity(Activity.RECORD) && !streamProcessor.isRecording(c.getRoomId())) {
+				if (sd instanceof ScreenStreamDesc scr && scr.has(Activity.RECORD) && !streamProcessor.isRecording(c.getRoomId())) {
 					streamProcessor.startRecording(c);
 				}
 			});
diff --git a/openmeetings-mediaserver/src/test/java/org/apache/openmeetings/mediaserver/TestRecordingFlowMocked.java b/openmeetings-mediaserver/src/test/java/org/apache/openmeetings/mediaserver/TestRecordingFlowMocked.java
index c116d808b..319eb1d25 100644
--- a/openmeetings-mediaserver/src/test/java/org/apache/openmeetings/mediaserver/TestRecordingFlowMocked.java
+++ b/openmeetings-mediaserver/src/test/java/org/apache/openmeetings/mediaserver/TestRecordingFlowMocked.java
@@ -34,7 +34,7 @@ import org.apache.openmeetings.db.dao.room.RoomDao;
 import org.apache.openmeetings.db.dao.user.UserDao;
 import org.apache.openmeetings.db.entity.basic.Client;
 import org.apache.openmeetings.db.entity.basic.Client.Activity;
-import org.apache.openmeetings.db.entity.basic.Client.StreamDesc;
+import org.apache.openmeetings.db.entity.basic.StreamDesc;
 import org.apache.openmeetings.db.entity.record.Recording;
 import org.apache.openmeetings.db.entity.room.Room;
 import org.apache.openmeetings.db.entity.user.User;
diff --git a/openmeetings-web/src/main/front/room/src/sharer.js b/openmeetings-web/src/main/front/room/src/sharer.js
index 682657124..774960ff7 100644
--- a/openmeetings-web/src/main/front/room/src/sharer.js
+++ b/openmeetings-web/src/main/front/room/src/sharer.js
@@ -28,6 +28,9 @@ function _init() {
 		, autoOpen: false
 		, resizable: false
 	});
+	const ui = sharer.closest('.ui-dialog');
+	const parent = $('.room-block .room-container .sb-wb');
+	ui.draggable('option', 'containment', parent);
 	fixJQueryUIDialogTouch(sharer);
 
 	if (!VideoUtil.sharingSupported()) {
diff --git a/openmeetings-web/src/main/front/room/src/user-list.js b/openmeetings-web/src/main/front/room/src/user-list.js
index d681358c3..e56eed39b 100644
--- a/openmeetings-web/src/main/front/room/src/user-list.js
+++ b/openmeetings-web/src/main/front/room/src/user-list.js
@@ -134,8 +134,9 @@ function _updateClient(c) {
 	}
 	const self = c.uid === options.uid
 		, le = _getClient(c.uid)
-		, hasAudio = VideoUtil.hasMic(c)
-		, hasVideo = VideoUtil.hasCam(c)
+		, selfCamStream = c.streams.find(s => s.type === 'WEBCAM')
+		, hasAudio = VideoUtil.hasMic(self && selfCamStream ? selfCamStream : c)
+		, hasVideo = VideoUtil.hasCam(self && selfCamStream ? selfCamStream : c)
 		, speaks = le.find('.audio-activity');
 	if (le.length === 0) {
 		return;
@@ -194,20 +195,20 @@ function _updateClient(c) {
 		__activityAVIcon(
 				header
 				, '.activity.cam'
-				, () => !options.audioOnly && UserListUtil.hasRight('VIDEO')
+				, () => !options.audioOnly && UserListUtil.hasRight(VideoUtil.CAM_ACTIVITY)
 				, () => hasVideo
 				, () => Settings.load().video.cam < 0)
 			.off().click(function() {
-				VideoManager.toggleActivity('VIDEO');
+				VideoManager.toggleActivity(VideoUtil.CAM_ACTIVITY);
 			});;
 		__rightAudioIcon(c, header);
 		__activityAVIcon(
 				header
-				, '.activity.mic', () => UserListUtil.hasRight('AUDIO')
+				, '.activity.mic', () => UserListUtil.hasRight(VideoUtil.MIC_ACTIVITY)
 				, () => hasAudio
 				, () => Settings.load().video.mic < 0)
 			.off().click(function() {
-				VideoManager.toggleActivity('AUDIO');
+				VideoManager.toggleActivity(VideoUtil.MIC_ACTIVITY);
 			});
 		__rightOtherIcons(c, header);
 	}
diff --git a/openmeetings-web/src/main/front/room/src/video.js b/openmeetings-web/src/main/front/room/src/video.js
index 3477c4bae..8ca6ea7c5 100644
--- a/openmeetings-web/src/main/front/room/src/video.js
+++ b/openmeetings-web/src/main/front/room/src/video.js
@@ -11,6 +11,13 @@ module.exports = class Video {
 			, lm, level, userSpeaks = false, muteOthers
 			, hasVideo, isSharing, isRecording;
 
+		function __getState() {
+			const state = states.length > 0 ? states[0] : null;
+			if (!state || state.disposed) {
+				return null;
+			}
+			return state;
+		}
 		function __getVideo(_state) {
 			const vid = self.video(_state);
 			return vid && vid.length > 0 ? vid[0] : null;
@@ -69,12 +76,12 @@ module.exports = class Video {
 		}
 		function _getVideoStream(msg, state, callback) {
 			VideoSettings.constraints(sd, function(cnts) {
-				if ((VideoUtil.hasCam(sd) && !cnts.video) || (VideoUtil.hasMic(sd) && !cnts.audio)) {
+				if (VideoUtil.hasCam(sd) !== cnts.videoEnabled || VideoUtil.hasMic(sd) !== cnts.audioEnabled) {
 					VideoMgrUtil.sendMessage({
 						id : 'devicesAltered'
 						, uid: sd.uid
-						, audio: !!cnts.audio
-						, video: !!cnts.video
+						, audio: cnts.audioEnabled
+						, video: cnts.videoEnabled
 					});
 				}
 				if (!cnts.audio && !cnts.video) {
@@ -87,6 +94,8 @@ module.exports = class Video {
 						if (state.disposed || msg.instanceUid !== v.data('instance-uid')) {
 							return;
 						}
+						stream.getVideoTracks().forEach(track => track.enabled = cnts.videoEnabled);
+						stream.getAudioTracks().forEach(track => track.enabled = cnts.audioEnabled);
 						state.localStream = stream;
 						let _stream = stream;
 						const data = {};
@@ -158,7 +167,7 @@ module.exports = class Video {
 				, onConnectionStateChange: () => __connectionStateChangeListener(state)
 			};
 			const vid = __getVideo(state);
-			vid.srcObject = state.stream;
+			VideoUtil.playSrc(vid, state.stream);
 
 			const data = state.data;
 			data.rtcPeer = new WebRtcPeerSendonly(VideoUtil.addIceServers(state.options, msg));
@@ -294,12 +303,16 @@ module.exports = class Video {
 						VideoMgrUtil.close(sd.uid, true);
 					});
 				}
+				const ui = v.closest('.ui-dialog');
+				const parent = $('.room-block .room-container .sb-wb');
+				ui.draggable('option', 'containment', parent);
+				ui.resizable('option', 'containment', parent);
 			}
 			_initDialogBtns(opts);
 		}
 		function _initDialogBtns(opts) {
-			function noDblClick(e) {
-				e.dblclick(function(e) {
+			function noDblClick(elem) {
+				elem.dblclick(function(e) {
 					e.stopImmediatePropagation();
 					return false;
 				});
@@ -359,10 +372,37 @@ module.exports = class Video {
 				&& prevA.every(function(value, index) { return value === sd.activities[index]})
 				&& prevW === sd.width && prevH === sd.height
 				&& prevCam == sd.cam && prevMic === sd.mic;
+			const camChanged = sd.camEnabled !== _c.camEnabled
+				, micChanged = sd.micEnabled !== _c.micEnabled
 			if (sd.self && !same) {
 				_cleanup();
 				v.remove();
 				_init({stream: sd, iceServers: iceServers});
+			} else if (camChanged || micChanged) {
+				sd.micEnabled = _c.micEnabled;
+				sd.camEnabled = _c.camEnabled;
+				const state = __getState();
+				if (camChanged) {
+					v.off();
+					if (v.dialog('instance')) {
+						v.dialog('destroy');
+					}
+					v.remove();
+					__initUI(v.data('instance-uid'));
+					__createVideo(state);
+					VideoUtil.playSrc(state.video[0], state.stream || state.rStream);
+					if (state.data.analyser) {
+						lm = vc.find('.level-meter');
+						level.setCanvas(lm);
+					}
+				}
+				if (micChanged) {
+					__updateVideo(state);
+				}
+				if (sd.self) {
+					state.localStream.getVideoTracks().forEach(track => track.enabled = sd.camEnabled);
+					state.localStream.getAudioTracks().forEach(track => track.enabled = sd.micEnabled);
+				}
 			}
 		}
 		function __createVideo(state) {
@@ -384,9 +424,13 @@ module.exports = class Video {
 				vc.parents('.ui-dialog').removeClass('audio-only');
 				state.video.attr('poster', sd.user.pictureUri);
 			} else {
+				state.video.attr('poster', null);
 				vc.addClass('audio-only');
 			}
 			vc.append(state.video);
+			__updateVideo(state);
+		}
+		function __updateVideo(state) {
 			if (VideoUtil.hasMic(sd)) {
 				const volIco = vol.create(self)
 				if (hasVideo) {
@@ -478,14 +522,14 @@ module.exports = class Video {
 			while(state = states.pop()) {
 				state.disposed = true;
 				if (state.options) {
-					delete state.options.videoStream;
 					delete state.options.mediaConstraints;
-					delete state.options.onicecandidate;
+					delete state.options.onIceCandidate;
 					state.options = null;
 				}
 				_cleanData(state.data);
 				VideoUtil.cleanStream(state.localStream);
 				VideoUtil.cleanStream(state.stream);
+				VideoUtil.cleanStream(state.rStream);
 				state.data = null;
 				state.localStream = null;
 				state.stream = null;
@@ -530,43 +574,30 @@ module.exports = class Video {
 			});
 		}
 		function _processSdpAnswer(answer) {
-			const state = states.length > 0 ? states[0] : null;
-			if (!state || state.disposed || !state.data.rtcPeer || state.data.rtcPeer.cleaned) {
+			const state = __getState();
+			if (!state || !state.data.rtcPeer) {
 				return;
 			}
 			state.data.rtcPeer.processRemoteAnswer(answer)
 				.then(() => {
 					const video = __getVideo(state);
-					const rStream = state.data.rtcPeer.pc.getRemoteStreams()[0];
-					if (rStream) {
-						video.srcObject = rStream;
-					}
-					if (state.data.rtcPeer.pc.signalingState === 'stable' && video && video.paused) {
-						video.play().catch(err => {
-							if ('NotAllowedError' === err.name) {
-								VideoUtil.askPermission(() => video.play());
-							}
-						});
-					}
+					state.rStream = state.data.rtcPeer.pc.getRemoteStreams()[0];
+					VideoUtil.playSrc(video, state.rStream);
 				})
 				.catch(error => OmUtil.error(error, true));
 		}
 		function _processIceCandidate(candidate) {
-			const state = states.length > 0 ? states[0] : null;
-			if (!state || state.disposed || !state.data.rtcPeer || state.data.rtcPeer.cleaned) {
+			const state = __getState();
+			if (!state || !state.data.rtcPeer) {
 				return;
 			}
 			state.data.rtcPeer.addIceCandidate(candidate)
 				.catch(error => OmUtil.error('Error adding candidate: ' + error, true));
 		}
-		function _init(_msg) {
-			sd = _msg.stream;
-			_msg.instanceUid = uuidv4();
+		function __initUI(instanceUid) {
 			if (!vol) {
 				vol = new Volume();
 			}
-			iceServers = _msg.iceServers;
-			sd.activities = sd.activities.sort();
 			isSharing = VideoUtil.isSharing(sd);
 			isRecording = VideoUtil.isRecording(sd);
 			const _id = VideoUtil.getVid(sd.uid)
@@ -575,7 +606,7 @@ module.exports = class Video {
 				, _h = sd.height
 				, opts = Room.getOptions();
 			sd.self = sd.cuid === opts.uid;
-			const contSel = _initContainer(_id, name, opts, _msg.instanceUid);
+			const contSel = _initContainer(_id, name, opts, instanceUid);
 			footer = v.find('.footer');
 			if (!opts.showMicStatus) {
 				footer.hide();
@@ -605,7 +636,13 @@ module.exports = class Video {
 			if (hasVideo) {
 				vc.width(_w).height(_h);
 			}
-
+		}
+		function _init(_msg) {
+			sd = _msg.stream;
+			sd.activities = sd.activities.sort();
+			_msg.instanceUid = uuidv4();
+			iceServers = _msg.iceServers;
+			__initUI(_msg.instanceUid);
 			_refresh(_msg);
 		}
 
@@ -633,7 +670,7 @@ module.exports = class Video {
 		};
 		this.reattachStream = _reattachStream;
 		this.video = function(_state) {
-			const state = _state || (states.length > 0 ? states[0] : null);
+			const state = _state || __getState();
 			if (!state || state.disposed) {
 				return null;
 			}
diff --git a/openmeetings-web/src/main/front/settings/src/mic-level.js b/openmeetings-web/src/main/front/settings/src/mic-level.js
index 9fb3edc00..d3c6d775f 100644
--- a/openmeetings-web/src/main/front/settings/src/mic-level.js
+++ b/openmeetings-web/src/main/front/settings/src/mic-level.js
@@ -3,9 +3,11 @@ const RingBuffer = require('./ring-buffer');
 
 module.exports = class MicLevel {
 	constructor() {
-		let ctx, mic, analyser, vol = .0, vals = new RingBuffer(100);
+		let ctx, mic, analyser
+			, cnvs, canvasCtx, WIDTH, HEIGHT, horiz
+			, vol = .0, vals = new RingBuffer(100);
 
-		this.meterStream = (stream, cnvs, _micActivity, _error, connectAudio) => {
+		this.meterStream = (stream, _cnvs, _micActivity, _error, connectAudio) => {
 			if (!stream || stream.getAudioTracks().length < 1) {
 				return;
 			}
@@ -22,26 +24,30 @@ module.exports = class MicLevel {
 				if (connectAudio) {
 					analyser.connect(ctx.destination);
 				}
-				this.meter(analyser, cnvs, _micActivity, _error);
+				this.meter(analyser, _cnvs, _micActivity, _error);
 			} catch (err) {
 				_error(err);
 			}
 		};
-		this.meter = (_analyser, cnvs, _micActivity, _error) => {
+		this.setCanvas = (_cnvs) => {
+			cnvs = _cnvs;
+			const canvas = cnvs[0];
+			canvasCtx = canvas.getContext('2d');
+			WIDTH = canvas.width;
+			HEIGHT = canvas.height;
+			horiz = cnvs.data('orientation') === 'horizontal';
+		};
+		this.meter = (_analyser, _cnvs, _micActivity, _error) => {
+			this.setCanvas(_cnvs);
 			try {
 				analyser = _analyser;
 				analyser.minDecibels = -90;
 				analyser.maxDecibels = -10;
 				analyser.fftSize = 256;
-				const canvas = cnvs[0]
-					, color = $('body').css('--level-color')
-					, canvasCtx = canvas.getContext('2d')
+				const color = $('body').css('--level-color')
 					, al = analyser.frequencyBinCount
-					, arr = new Uint8Array(al)
-					, horiz = cnvs.data('orientation') === 'horizontal';
+					, arr = new Uint8Array(al);
 				function update() {
-					const WIDTH = canvas.width
-						, HEIGHT = canvas.height;
 					canvasCtx.clearRect(0, 0, WIDTH, HEIGHT);
 					if (!!analyser && cnvs.length > 0) {
 						if (cnvs.is(':visible')) {
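
setCanvas() is split out of meter() so an already-running level meter can be re-pointed at a freshly created canvas (the camChanged branch of video.js above does exactly this) without rebuilding the analyser chain. Illustrative sketch only, assuming `level` is an active MicLevel instance and `vc` is the rebuilt video container as in video.js above:

	// after the video container (and its <canvas>) has been re-created:
	const lm = vc.find('.level-meter');   // jQuery-wrapped <canvas>
	level.setCanvas(lm);                  // drawing target swapped, analyser untouched
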
diff --git a/openmeetings-web/src/main/front/settings/src/settings.js b/openmeetings-web/src/main/front/settings/src/settings.js
index 6c16d4cc3..795108ff1 100644
--- a/openmeetings-web/src/main/front/settings/src/settings.js
+++ b/openmeetings-web/src/main/front/settings/src/settings.js
@@ -149,8 +149,11 @@ function _setCntsDimensions(cnts) {
 // min/ideal/max/exact/mandatory can also be used
 function _constraints(sd, callback) {
 	_getDevConstraints(function(devCnts) {
-		const cnts = {};
-		if (devCnts.video && false === o.audioOnly && VideoUtil.hasCam(sd) && s.video.cam > -1) {
+		const cnts = {
+			videoEnabled: VideoUtil.hasCam(sd)
+			, audioEnabled: VideoUtil.hasMic(sd)
+		};
+		if (devCnts.video && false === o.audioOnly && s.video.cam > -1) {
 			cnts.video = {
 				frameRate: o.camera.fps
 			};
@@ -167,7 +170,7 @@ function _constraints(sd, callback) {
 		} else {
 			cnts.video = false;
 		}
-		if (devCnts.audio && VideoUtil.hasMic(sd) && s.video.mic > -1) {
+		if (devCnts.audio && s.video.mic > -1) {
 			cnts.audio = {
 				sampleRate: o.microphone.rate
 				, echoCancellation: o.microphone.echo
@@ -206,7 +209,7 @@ function _readValues(msg, func) {
 			}, msg);
 			navigator.mediaDevices.getUserMedia(cnts)
 				.then(stream => {
-					vid[0].srcObject = stream;
+					VideoUtil.playSrc(vid[0], stream);
 					options.mediaStream = stream;
 
 					rtcPeer = new WebRtcPeerSendonly(options);
@@ -403,7 +406,7 @@ function _onKMessage(m) {
 				.then(() => {
 					const stream = rtcPeer.stream;
 					if (stream) {
-						vid[0].srcObject = stream;
+						VideoUtil.playSrc(vid[0], stream);
 						lm.show();
 						level = new MicLevel();
 						level.meterStream(stream, lm, function(){}, OmUtil.error, true);
diff --git a/openmeetings-web/src/main/front/settings/src/video-util.js b/openmeetings-web/src/main/front/settings/src/video-util.js
index d13f1e5c9..1d6518b08 100644
--- a/openmeetings-web/src/main/front/settings/src/video-util.js
+++ b/openmeetings-web/src/main/front/settings/src/video-util.js
@@ -18,10 +18,18 @@ function _isRecording(sd) {
 	return !!sd && 'SCREEN' === sd.type && sd.activities.includes(REC_ACTIVITY);
 }
 function _hasMic(sd) {
-	return !sd || sd.activities.includes(MIC_ACTIVITY);
+	if (!sd) {
+		return true;
+	}
+	const enabled = sd.micEnabled !== false;
+	return sd.activities.includes(MIC_ACTIVITY) && enabled;
 }
 function _hasCam(sd) {
-	return !sd || sd.activities.includes(CAM_ACTIVITY);
+	if (!sd) {
+		return true;
+	}
+	const enabled = sd.camEnabled !== false;
+	return sd.activities.includes(CAM_ACTIVITY) && enabled;
 }
 function _hasVideo(sd) {
 	return _hasCam(sd) || _isSharing(sd) || _isRecording(sd);
@@ -276,10 +284,24 @@ function _highlight(el, clazz, count) {
 		next();
 	});
 }
+function _playSrc(_video, _stream) {
+	if (_stream && _video) {
+		_video.srcObject = _stream;
+		if (_video.paused) {
+			_video.play().catch(err => {
+				if ('NotAllowedError' === err.name) {
+					_askPermission(() => _video.play());
+				}
+			});
+		}
+	}
+}
 
 module.exports = {
 	VIDWIN_SEL: VIDWIN_SEL
 	, VID_SEL: VID_SEL
+	, CAM_ACTIVITY: CAM_ACTIVITY
+	, MIC_ACTIVITY: MIC_ACTIVITY
 
 	, getVid: _getVid
 	, isSharing: _isSharing
@@ -305,4 +327,5 @@ module.exports = {
 	, disconnect: _disconnect
 	, sharingSupported: _sharingSupported
 	, highlight: _highlight
+	, playSrc: _playSrc
 };
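
The new playSrc() helper centralizes the attach-and-play pattern that was previously inlined at each call site. A minimal usage sketch, assuming `vid` and `stream` as in settings.js above:

	const VideoUtil = require('./video-util');
	// replaces the former `vid[0].srcObject = stream` plus manual play()/NotAllowedError handling;
	// playSrc() is a no-op when either argument is missing and retries play() via askPermission()
	VideoUtil.playSrc(vid[0], stream);
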
diff --git a/openmeetings-web/src/main/java/org/apache/openmeetings/web/admin/connection/KStreamDto.java b/openmeetings-web/src/main/java/org/apache/openmeetings/web/admin/connection/KStreamDto.java
index 796d57d7e..4f7c590cc 100644
--- a/openmeetings-web/src/main/java/org/apache/openmeetings/web/admin/connection/KStreamDto.java
+++ b/openmeetings-web/src/main/java/org/apache/openmeetings/web/admin/connection/KStreamDto.java
@@ -53,7 +53,7 @@ public class KStreamDto implements IDataProviderEntity {
 		roomId = kStream.getRoomId();
 		connectedSince = kStream.getConnectedSince();
 		streamType = kStream.getStreamType();
-		profile = kStream.getProfile().toString();
+		profile = "" + kStream.getProfile();
 		recorder = (kStream.getRecorder() == null) ? null : kStream.getRecorder().toString();
 		chunkId = kStream.getChunkId();
 		type = kStream.getType();
diff --git a/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/RoomPanel.java b/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/RoomPanel.java
index 1f01acd62..f4bebbb9d 100644
--- a/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/RoomPanel.java
+++ b/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/RoomPanel.java
@@ -26,7 +26,6 @@ import static org.apache.openmeetings.util.OmFileHelper.EXTENSION_PDF;
 import static org.apache.openmeetings.web.app.WebSession.getDateFormat;
 import static org.apache.openmeetings.web.app.WebSession.getUserId;
 import static org.apache.openmeetings.web.room.wb.WbPanel.WB_JS_REFERENCE;
-import static org.apache.openmeetings.mediaserver.KurentoHandler.activityAllowed;
 
 import java.io.IOException;
 import java.nio.file.Files;
@@ -144,7 +143,6 @@ public class RoomPanel extends BasePanel {
 		}
 	}
 	private final Room r;
-	private final boolean interview;
 	private final WebMarkupContainer room = new WebMarkupContainer("roomContainer");
 	private final AbstractDefaultAjaxBehavior roomEnter = new AbstractDefaultAjaxBehavior() {
 		private static final long serialVersionUID = 1L;
@@ -158,7 +156,7 @@ public class RoomPanel extends BasePanel {
 					.put("uid", c.getUid())
 					.put("userId", c.getUserId())
 					.put("rights", c.toJson(true).getJSONArray("rights"))
-					.put("interview", interview)
+					.put("interview", r.isInterview())
 					.put("audioOnly", r.isAudioOnly())
 					.put("allowRecording", r.isAllowRecording())
 					.put("questions", r.isAllowUserQuestions())
@@ -212,7 +210,7 @@ public class RoomPanel extends BasePanel {
 			if (streams.length() > 0) {
 				sb.append("VideoManager.play(").append(streams).append(", ").append(kHandler.getTurnServers(getClient())).append(");");
 			}
-			if (interview && streamProcessor.recordingAllowed(getClient())) {
+			if (r.isInterview() && streamProcessor.recordingAllowed(getClient())) {
 				sb.append("WbArea.setRecEnabled(true);");
 			}
 			if (!Strings.isEmpty(sb)) {
@@ -280,8 +278,7 @@ public class RoomPanel extends BasePanel {
 	public RoomPanel(String id, Room r) {
 		super(id);
 		this.r = r;
-		this.interview = Room.Type.INTERVIEW == r.getType();
-		this.wb = interview ? new InterviewWbPanel("whiteboard", this) : new WbPanel("whiteboard", this);
+		this.wb = r.isInterview() ? new InterviewWbPanel("whiteboard", this) : new WbPanel("whiteboard", this);
 	}
 
 	public void startDownload(IPartialPageRequestHandler handler, String type, String fuid) {
@@ -300,7 +297,7 @@ public class RoomPanel extends BasePanel {
 		room.setOutputMarkupPlaceholderTag(true);
 		room.add(menu = new RoomMenuPanel("menu", this));
 		room.add(AttributeModifier.append("data-room-id", r.getId()));
-		if (interview) {
+		if (r.isInterview()) {
 			room.add(new WebMarkupContainer("wb-area").add(wb));
 		} else {
 			Droppable<BaseFileItem> wbArea = new Droppable<>("wb-area") {
@@ -629,7 +626,7 @@ public class RoomPanel extends BasePanel {
 
 	private void updateInterviewRecordingButtons(IPartialPageRequestHandler handler) {
 		Client curClient = getClient();
-		if (interview && curClient.hasRight(Right.MODERATOR)) {
+		if (r.isInterview() && curClient.hasRight(Right.MODERATOR)) {
 			if (streamProcessor.isRecording(r.getId())) {
 				handler.appendJavaScript("if (typeof(WbArea) === 'object') {WbArea.setRecStarted(true);}");
 			} else if (streamProcessor.recordingAllowed(getClient())) {
@@ -761,12 +758,14 @@ public class RoomPanel extends BasePanel {
 		for (Right right : rights) {
 			client.deny(right);
 		}
-		if (client.hasActivity(Client.Activity.AUDIO) && !client.hasRight(Right.AUDIO)) {
-			client.remove(Client.Activity.AUDIO);
-		}
-		if (client.hasActivity(Client.Activity.VIDEO) && !client.hasRight(Right.VIDEO)) {
-			client.remove(Client.Activity.VIDEO);
-		}
+		client.getCamStreams().forEach(sd -> {
+			if (sd.has(Client.Activity.AUDIO) && !client.hasRight(Right.AUDIO)) {
+				sd.remove(Client.Activity.AUDIO);
+			}
+			if (sd.has(Client.Activity.VIDEO) && !client.hasRight(Right.VIDEO)) {
+				sd.remove(Client.Activity.VIDEO);
+			}
+		});
 		rightsUpdated(client);
 	}
 
@@ -795,10 +794,10 @@ public class RoomPanel extends BasePanel {
 				if (!avInited) {
 					avInited = true;
 					if (Room.Type.CONFERENCE == r.getType()) {
-						if (!activityAllowed(c, Client.Activity.AUDIO, c.getRoom())) {
+						if (!c.isAllowed(Client.Activity.AUDIO)) {
 							c.allow(Room.Right.AUDIO);
 						}
-						if (!c.getRoom().isAudioOnly() && !activityAllowed(c, Client.Activity.VIDEO, c.getRoom())) {
+						if (!c.getRoom().isAudioOnly() && !c.isAllowed(Client.Activity.VIDEO)) {
 							c.allow(Room.Right.VIDEO);
 						}
 						streamProcessor.onToggleActivity(c, c.getRoom().isAudioOnly()
@@ -845,7 +844,7 @@ public class RoomPanel extends BasePanel {
 	}
 
 	public boolean isInterview() {
-		return interview;
+		return r.isInterview();
 	}
 
 	private void createWaitModerator(final boolean autoopen) {
diff --git a/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/sidebar/RoomSidebar.java b/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/sidebar/RoomSidebar.java
index c7f5146a4..480d85da6 100644
--- a/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/sidebar/RoomSidebar.java
+++ b/openmeetings-web/src/main/java/org/apache/openmeetings/web/room/sidebar/RoomSidebar.java
@@ -175,7 +175,7 @@ public class RoomSidebar extends Panel {
 
 	private void muteRoomAction(String uid, Client self, JSONObject o) {
 		Client c = cm.get(uid);
-		if (c == null || !c.hasActivity(Client.Activity.AUDIO)) {
+		if (c == null || !c.has(Client.Activity.AUDIO)) {
 			return;
 		}
 		if (self.hasRight(Right.MODERATOR) || self.getUid().equals(c.getUid())) {


[openmeetings] 01/03: [OPENMEETINGS-2253] RTC related JS code is simplified; deprecated kurento-utils-js is dropped

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

solomax pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/openmeetings.git

commit 92a8a51a6943c5fc673189e7284ca7ede8111f35
Author: Maxim Solodovnik <so...@gmail.com>
AuthorDate: Sun Nov 27 11:56:54 2022 +0700

    [OPENMEETINGS-2253] RTC related JS code is simplified; deprecated kurento-utils-js is dropped
---
 openmeetings-web/src/main/front/room/src/video.js  | 214 +++-----
 .../src/main/front/settings/package.json           |   5 +-
 .../src/main/front/settings/src/WebRtcPeer.js      | 592 +++++++++++++++++++++
 .../src/main/front/settings/src/index.js           |   9 +-
 .../src/main/front/settings/src/mic-level.js       |   6 +-
 .../src/main/front/settings/src/settings.js        | 140 ++---
 .../src/main/front/settings/src/video-util.js      |  23 +-
 7 files changed, 745 insertions(+), 244 deletions(-)

diff --git a/openmeetings-web/src/main/front/room/src/video.js b/openmeetings-web/src/main/front/room/src/video.js
index 676c9d551..3477c4bae 100644
--- a/openmeetings-web/src/main/front/room/src/video.js
+++ b/openmeetings-web/src/main/front/room/src/video.js
@@ -105,9 +105,7 @@ module.exports = class Video {
 								data.aDest = data.aCtx.createMediaStreamDestination();
 								data.analyser.connect(data.aDest);
 								_stream = data.aDest.stream;
-								stream.getVideoTracks().forEach(function(track) {
-									_stream.addTrack(track);
-								});
+								stream.getVideoTracks().forEach(track => _stream.addTrack(track));
 							}
 						}
 						state.data = data;
@@ -131,86 +129,69 @@ module.exports = class Video {
 					});
 			});
 		}
-		function __attachListener(state) {
-			if (!state.disposed && state.data.rtcPeer) {
-				const pc = state.data.rtcPeer.peerConnection;
-				pc.onconnectionstatechange = function(event) {
-					console.warn(`!!RTCPeerConnection state changed: ${pc.connectionState}, user: ${sd.user.displayName}, uid: ${sd.uid}`);
-					switch(pc.connectionState) {
-						case "connected":
-							if (sd.self) {
-								// The connection has become fully connected
-								OmUtil.alert('info', `Connection to Media server has been established`, 3000);//notify user
-							}
-							break;
-						case "disconnected":
-						case "failed":
-							//connection has been dropped
-							OmUtil.alert('warning', `Media server connection for user ${sd.user.displayName} is ${pc.connectionState}, will try to re-connect`, 3000);//notify user
-							_refresh();
-							break;
-						case "closed":
-							// The connection has been closed
-							break;
+		function __connectionStateChangeListener(state) {
+			const pc = state.data.rtcPeer.pc;
+			console.warn(`!!RTCPeerConnection state changed: ${pc.connectionState}, user: ${sd.user.displayName}, uid: ${sd.uid}`);
+			switch(pc.connectionState) {
+				case "connected":
+					if (sd.self) {
+						// The connection has become fully connected
+						OmUtil.alert('info', `Connection to Media server has been established`, 3000);//notify user
 					}
-				}
+					break;
+				case "disconnected":
+				case "failed":
+					//connection has been dropped
+					OmUtil.alert('warning', `Media server connection for user ${sd.user.displayName} is ${pc.connectionState}, will try to re-connect`, 3000);//notify user
+					_refresh();
+					break;
+				case "closed":
+					// The connection has been closed
+					break;
 			}
 		}
 		function __createSendPeer(msg, state, cnts) {
 			state.options = {
-				videoStream: state.stream
+				mediaStream: state.stream
 				, mediaConstraints: cnts
-				, onicecandidate: self.onIceCandidate
+				, onIceCandidate: self.onIceCandidate
+				, onConnectionStateChange: () => __connectionStateChangeListener(state)
 			};
-			if (!isSharing) {
-				state.options.localVideo = __getVideo(state);
-			}
+			const vid = __getVideo(state);
+			vid.srcObject = state.stream;
+
 			const data = state.data;
-			data.rtcPeer = new kurentoUtils.WebRtcPeer.WebRtcPeerSendonly(
-				VideoUtil.addIceServers(state.options, msg)
-				, function (error) {
-					if (state.disposed || true === data.rtcPeer.cleaned) {
-						return;
+			data.rtcPeer = new WebRtcPeerSendonly(VideoUtil.addIceServers(state.options, msg));
+			if (data.analyser) {
+				level = new MicLevel();
+				level.meter(data.analyser, lm, _micActivity, OmUtil.error);
+			}
+			data.rtcPeer.createOffer()
+				.then(sdpOffer => {
+					data.rtcPeer.processLocalOffer(sdpOffer);
+					OmUtil.log('Invoking Sender SDP offer callback function');
+					const bmsg = {
+							id : 'broadcastStarted'
+							, uid: sd.uid
+							, sdpOffer: sdpOffer.sdp
+						}, vtracks = state.stream.getVideoTracks();
+					if (vtracks && vtracks.length > 0) {
+						const vts = vtracks[0].getSettings();
+						vidSize.width = vts.width;
+						vidSize.height = vts.height;
+						bmsg.width = vts.width;
+						bmsg.height = vts.height;
+						bmsg.fps = vts.frameRate;
 					}
-					if (error) {
-						return OmUtil.error(error);
+					VideoMgrUtil.sendMessage(bmsg);
+					if (isSharing) {
+						Sharer.setShareState(Sharer.SHARE_STARTED);
 					}
-					if (data.analyser) {
-						level = new MicLevel();
-						level.meter(data.analyser, lm, _micActivity, OmUtil.error);
+					if (isRecording) {
+						Sharer.setRecState(Sharer.SHARE_STARTED);
 					}
-					data.rtcPeer.generateOffer(function(genErr, offerSdp) {
-						if (state.disposed || true === data.rtcPeer.cleaned) {
-							return;
-						}
-						if (genErr) {
-							return OmUtil.error('Sender sdp offer error ' + genErr);
-						}
-						OmUtil.log('Invoking Sender SDP offer callback function');
-						const bmsg = {
-								id : 'broadcastStarted'
-								, uid: sd.uid
-								, sdpOffer: offerSdp
-							}, vtracks = state.stream.getVideoTracks();
-						if (vtracks && vtracks.length > 0) {
-							const vts = vtracks[0].getSettings();
-							vidSize.width = vts.width;
-							vidSize.height = vts.height;
-							bmsg.width = vts.width;
-							bmsg.height = vts.height;
-							bmsg.fps = vts.frameRate;
-						}
-						VideoMgrUtil.sendMessage(bmsg);
-						if (isSharing) {
-							Sharer.setShareState(Sharer.SHARE_STARTED);
-						}
-						if (isRecording) {
-							Sharer.setRecState(Sharer.SHARE_STARTED);
-						}
-					});
-				});
-			data.rtcPeer.cleaned = false;
-			__attachListener(state);
+				})
+				.catch(error => OmUtil.error(error));
 		}
 		function _createSendPeer(msg, state) {
 			if (isSharing || isRecording) {
@@ -222,36 +203,23 @@ module.exports = class Video {
 		function _createResvPeer(msg, state) {
 			__createVideo(state);
 			const options = VideoUtil.addIceServers({
-				remoteVideo : __getVideo(state)
-				, onicecandidate : self.onIceCandidate
+				mediaConstraints: {audio: true, video: true}
+				, onIceCandidate : self.onIceCandidate
+				, onConnectionStateChange: () => __connectionStateChangeListener(state)
 			}, msg);
 			const data = state.data;
-			data.rtcPeer = new kurentoUtils.WebRtcPeer.WebRtcPeerRecvonly(
-				options
-				, function(error) {
-					if (state.disposed || true === data.rtcPeer.cleaned) {
-						return;
-					}
-					if (error) {
-						return OmUtil.error(error);
-					}
-					data.rtcPeer.generateOffer(function(genErr, offerSdp) {
-						if (state.disposed || true === data.rtcPeer.cleaned) {
-							return;
-						}
-						if (genErr) {
-							return OmUtil.error('Receiver sdp offer error ' + genErr);
-						}
-						OmUtil.log('Invoking Receiver SDP offer callback function');
-						VideoMgrUtil.sendMessage({
-							id : 'addListener'
-							, sender: sd.uid
-							, sdpOffer: offerSdp
-						});
+			data.rtcPeer = new WebRtcPeerRecvonly(options);
+			data.rtcPeer.createOffer()
+				.then(sdpOffer => {
+					data.rtcPeer.processLocalOffer(sdpOffer);
+					OmUtil.log('Invoking Receiver SDP offer callback function');
+					VideoMgrUtil.sendMessage({
+						id : 'addListener'
+						, sender: sd.uid
+						, sdpOffer: sdpOffer.sdp
 					});
-				});
-			data.rtcPeer.cleaned = false;
-			__attachListener(state);
+				})
+				.catch(genErr => OmUtil.error('Receiver sdp offer error ' + genErr));
 		}
 		function _handleMicStatus(state) {
 			if (!footer || !footer.is(':visible')) {
@@ -513,7 +481,6 @@ module.exports = class Video {
 					delete state.options.videoStream;
 					delete state.options.mediaConstraints;
 					delete state.options.onicecandidate;
-					delete state.options.localVideo;
 					state.options = null;
 				}
 				_cleanData(state.data);
@@ -557,7 +524,7 @@ module.exports = class Video {
 					const data = state.data
 						, videoEl = state.video[0];
 					if (data.rtcPeer && (!videoEl.srcObject || !videoEl.srcObject.active)) {
-						videoEl.srcObject = sd.self ? data.rtcPeer.getLocalStream() : data.rtcPeer.getRemoteStream();
+						videoEl.srcObject = data.rtcPeer.stream;
 					}
 				}
 			});
@@ -567,39 +534,30 @@ module.exports = class Video {
 			if (!state || state.disposed || !state.data.rtcPeer || state.data.rtcPeer.cleaned) {
 				return;
 			}
-			state.data.rtcPeer.processAnswer(answer, function (error) {
-				if (true === this.cleaned) {
-					return;
-				}
-				const video = __getVideo(state);
-				if (this.peerConnection.signalingState === 'stable' && video && video.paused) {
-					video.play().catch(function (err) {
-						if ('NotAllowedError' === err.name) {
-							VideoUtil.askPermission(function () {
-								video.play();
-							});
-						}
-					});
-					return;
-				}
-				if (error) {
-					OmUtil.error(error, true);
-				}
-			});
+			state.data.rtcPeer.processRemoteAnswer(answer)
+				.then(() => {
+					const video = __getVideo(state);
+					const rStream = state.data.rtcPeer.pc.getRemoteStreams()[0];
+					if (rStream) {
+						video.srcObject = rStream;
+					}
+					if (state.data.rtcPeer.pc.signalingState === 'stable' && video && video.paused) {
+						video.play().catch(err => {
+							if ('NotAllowedError' === err.name) {
+								VideoUtil.askPermission(() => video.play());
+							}
+						});
+					}
+				})
+				.catch(error => OmUtil.error(error, true));
 		}
 		function _processIceCandidate(candidate) {
 			const state = states.length > 0 ? states[0] : null;
 			if (!state || state.disposed || !state.data.rtcPeer || state.data.rtcPeer.cleaned) {
 				return;
 			}
-			state.data.rtcPeer.addIceCandidate(candidate, function (error) {
-				if (true === this.cleaned) {
-					return;
-				}
-				if (error) {
-					OmUtil.error('Error adding candidate: ' + error, true);
-				}
-			});
+			state.data.rtcPeer.addIceCandidate(candidate)
+				.catch(error => OmUtil.error('Error adding candidate: ' + error, true));
 		}
 		function _init(_msg) {
 			sd = _msg.stream;
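
With kurento-utils gone, the sender side follows a plain promise chain instead of nested callbacks. A condensed sketch of the flow implemented above (msg, stream, cnts, sd and the OM utility modules are assumed from the surrounding code; the ICE candidate callback body is elided):

	const peer = new WebRtcPeerSendonly(VideoUtil.addIceServers({
		mediaStream: stream
		, mediaConstraints: cnts
		, onIceCandidate: candidate => { /* forwarded to the media server, see self.onIceCandidate */ }
	}, msg));
	peer.createOffer()
		.then(sdpOffer => {
			peer.processLocalOffer(sdpOffer);
			VideoMgrUtil.sendMessage({id: 'broadcastStarted', uid: sd.uid, sdpOffer: sdpOffer.sdp});
		})
		.catch(error => OmUtil.error(error));
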
diff --git a/openmeetings-web/src/main/front/settings/package.json b/openmeetings-web/src/main/front/settings/package.json
index c45a30778..8437ecc37 100644
--- a/openmeetings-web/src/main/front/settings/package.json
+++ b/openmeetings-web/src/main/front/settings/package.json
@@ -16,7 +16,8 @@
     "tinyify": "^3.1.0"
   },
   "dependencies": {
-    "adapterjs": "^0.15.5",
-    "kurento-utils": "^6.16.0"
+    "freeice": "2.2.2",
+    "uuid": "^9.0.0",
+    "webrtc-adapter": "^8.2.0"
   }
 }
diff --git a/openmeetings-web/src/main/front/settings/src/WebRtcPeer.js b/openmeetings-web/src/main/front/settings/src/WebRtcPeer.js
new file mode 100644
index 000000000..d40d4b015
--- /dev/null
+++ b/openmeetings-web/src/main/front/settings/src/WebRtcPeer.js
@@ -0,0 +1,592 @@
+/*
+ * (C) Copyright 2017-2022 OpenVidu (https://openvidu.io)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// taken from here:
+// https://github.com/OpenVidu/openvidu/blob/master/openvidu-browser/src/OpenViduInternal/WebRtcPeer/WebRtcPeer.ts
+// and monkey-patched
+
+const freeice = require('freeice');
+
+const ExceptionEventName = {
+	/**
+	 * The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
+	 * of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `failed` status.
+	 *
+	 * This is a terminal error that won't have any kind of possible recovery. If the client is still connected to OpenVidu Server,
+	 * then an automatic reconnection process of the media stream is immediately performed. If the ICE connection has broken due to
+	 * a total network drop, then no automatic reconnection process will be possible.
+	 *
+	 * {@link ExceptionEvent} objects with this {@link ExceptionEvent.name} will have as {@link ExceptionEvent.origin} property a {@link Stream} object.
+	 */
+	 ICE_CONNECTION_FAILED: 'ICE_CONNECTION_FAILED',
+
+	/**
+	 * The [ICE connection state](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/iceConnectionState)
+	 * of an [RTCPeerConnection](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection) reached `disconnected` status.
+	 *
+	 * This is not a terminal error, and it is possible for the ICE connection to be reconnected. If the client is still connected to
+	 * OpenVidu Server and after certain timeout the ICE connection has not reached a success or terminal status, then an automatic
+	 * reconnection process of the media stream is performed. If the ICE connection has broken due to a total network drop, then no
+	 * automatic reconnection process will be possible.
+	 *
+	 * You can customize the timeout for the reconnection attempt with property {@link OpenViduAdvancedConfiguration.iceConnectionDisconnectedExceptionTimeout},
+	 * which by default is 4000 milliseconds.
+	 *
+	 * {@link ExceptionEvent} objects with this {@link ExceptionEvent.name} will have as {@link ExceptionEvent.origin} property a {@link Stream} object.
+	 */
+	 ICE_CONNECTION_DISCONNECTED: 'ICE_CONNECTION_DISCONNECTED',
+};
+
+class WebRtcPeer {
+	constructor(configuration) {
+		this.remoteCandidatesQueue = [];
+		this.localCandidatesQueue = [];
+		this.iceCandidateList = [];
+		this.candidategatheringdone = false;
+
+		// Same as WebRtcPeerConfiguration but without optional fields.
+		this.configuration = {
+			...configuration,
+			iceServers: !!configuration.iceServers && configuration.iceServers.length > 0 ? configuration.iceServers : freeice(),
+			mediaStream: configuration.mediaStream !== undefined ? configuration.mediaStream : null,
+			mode: !!configuration.mode ? configuration.mode : 'sendrecv',
+			id: !!configuration.id ? configuration.id : this.generateUniqueId()
+		};
+		// prettier-ignore
+		OmUtil.log(`[WebRtcPeer] configuration:\n${JSON.stringify(this.configuration, null, 2)}`);
+
+		this.pc = new RTCPeerConnection({ iceServers: this.configuration.iceServers });
+
+		this._iceCandidateListener = (event) => {
+			if (event.candidate !== null) {
+				// `RTCPeerConnectionIceEvent.candidate` is supposed to be an RTCIceCandidate:
+				// https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectioniceevent-candidate
+				//
+				// But in practice, it is actually an RTCIceCandidateInit that can be used to
+				// obtain a proper candidate, using the RTCIceCandidate constructor:
+				// https://w3c.github.io/webrtc-pc/#dom-rtcicecandidate-constructor
+				const candidateInit = event.candidate;
+				const iceCandidate = new RTCIceCandidate(candidateInit);
+
+				this.configuration.onIceCandidate(iceCandidate);
+				if (iceCandidate.candidate !== '') {
+					this.localCandidatesQueue.push(iceCandidate);
+				}
+			}
+		};
+		this.pc.addEventListener('icecandidate', this._iceCandidateListener);
+
+		this._signalingStateChangeListener = () => {
+			if (this.pc.signalingState === 'stable') {
+				// SDP Offer/Answer finished. Add stored remote candidates.
+				while (this.iceCandidateList.length > 0) {
+					let candidate = this.iceCandidateList.shift();
+					this.pc.addIceCandidate(candidate);
+				}
+			}
+		};
+		this.pc.addEventListener('signalingstatechange', this._signalingStateChangeListener);
+		if (this.configuration.onConnectionStateChange) {
+			this.pc.addEventListener('connectionstatechange', this.configuration.onConnectionStateChange);
+		}
+	}
+
+	getId() {
+		return this.configuration.id;
+	}
+
+	/**
+	 * This method frees the resources used by WebRtcPeer
+	 */
+	dispose() {
+		OmUtil.log('Disposing WebRtcPeer');
+		if (this.pc) {
+			if (this.pc.signalingState === 'closed') {
+				return;
+			}
+			this.pc.removeEventListener('icecandidate', this._iceCandidateListener);
+			this._iceCandidateListener = undefined;
+			this.pc.removeEventListener('signalingstatechange', this._signalingStateChangeListener);
+			this._signalingStateChangeListener = undefined;
+			if (this._iceConnectionStateChangeListener) {
+				this.pc.removeEventListener('iceconnectionstatechange', this._iceConnectionStateChangeListener);
+			}
+			if (this.configuration.onConnectionStateChange) {
+				this.pc.removeEventListener('connectionstatechange', this.configuration.onConnectionStateChange);
+			}
+			this.configuration = {};
+			this.pc.close();
+			this.remoteCandidatesQueue = [];
+			this.localCandidatesQueue = [];
+		}
+	}
+
+	/**
+	 * Creates an SDP offer from the local RTCPeerConnection to send to the other peer.
+	 * Only if the negotiation was initiated by this peer.
+	 */
+	async createOffer() {
+		// TODO: Delete this conditional when all supported browsers are
+		// modern enough to implement the Transceiver methods.
+		if (!('addTransceiver' in this.pc)) {
+			OmUtil.error(
+				'[createOffer] Method RTCPeerConnection.addTransceiver() is NOT available; using LEGACY offerToReceive{Audio,Video}'
+			);
+			return this.createOfferLegacy();
+		} else {
+			OmUtil.log('[createOffer] Method RTCPeerConnection.addTransceiver() is available; using it');
+		}
+
+		// Spec doc: https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver
+
+		if (this.configuration.mode !== 'recvonly') {
+			// To send media, assume that all desired media tracks have been
+			// already added by higher level code to our MediaStream.
+
+			if (!this.configuration.mediaStream) {
+				throw new Error(
+					`[WebRtcPeer.createOffer] Direction is '${this.configuration.mode}', but no stream was configured to be sent`
+				);
+			}
+
+			for (const track of this.configuration.mediaStream.getTracks()) {
+				const tcInit = {
+					direction: this.configuration.mode,
+					streams: [this.configuration.mediaStream]
+				};
+
+				if (track.kind === 'video' && this.configuration.simulcast) {
+					// Check if the requested size is enough to ask for 3 layers.
+					const trackSettings = track.getSettings();
+					const trackConsts = track.getConstraints();
+
+					const trackWidth = typeof(trackSettings.width) === 'object' ? trackConsts.width.ideal : trackConsts.width || 0;
+					const trackHeight = typeof(trackSettings.height) === 'object' ? trackConsts.height.ideal : trackConsts.height || 0;
+					OmUtil.info(`[createOffer] Video track dimensions: ${trackWidth}x${trackHeight}`);
+
+					const trackPixels = trackWidth * trackHeight;
+					let maxLayers = 0;
+					if (trackPixels >= 960 * 540) {
+						maxLayers = 3;
+					} else if (trackPixels >= 480 * 270) {
+						maxLayers = 2;
+					} else {
+						maxLayers = 1;
+					}
+
+					tcInit.sendEncodings = [];
+					for (let l = 0; l < maxLayers; l++) {
+						const layerDiv = 2 ** (maxLayers - l - 1);
+
+						const encoding = {
+							rid: 'rdiv' + layerDiv.toString(),
+
+							// @ts-ignore -- Property missing from DOM types.
+							scalabilityMode: 'L1T1'
+						};
+
+						if (['detail', 'text'].includes(track.contentHint)) {
+							// Prioritize best resolution, for maximum picture detail.
+							encoding.scaleResolutionDownBy = 1.0;
+
+							// @ts-ignore -- Property missing from DOM types.
+							encoding.maxFramerate = Math.floor(30 / layerDiv);
+						} else {
+							encoding.scaleResolutionDownBy = layerDiv;
+						}
+
+						tcInit.sendEncodings.push(encoding);
+					}
+				}
+
+				const tc = this.pc.addTransceiver(track, tcInit);
+
+				if (track.kind === 'video') {
+					let sendParams = tc.sender.getParameters();
+					let needSetParams = false;
+
+					if (sendParams.degradationPreference && !sendParams.degradationPreference.length) {
+						// degradationPreference for video: "balanced", "maintain-framerate", "maintain-resolution".
+						// https://www.w3.org/TR/2018/CR-webrtc-20180927/#dom-rtcdegradationpreference
+						if (['detail', 'text'].includes(track.contentHint)) {
+							sendParams.degradationPreference = 'maintain-resolution';
+						} else {
+							sendParams.degradationPreference = 'balanced';
+						}
+
+						OmUtil.info(`[createOffer] Video sender Degradation Preference set: ${sendParams.degradationPreference}`);
+
+						// FIXME: Firefox implements degradationPreference on each individual encoding!
+						// (set it on every element of the sendParams.encodings array)
+
+						needSetParams = true;
+					}
+
+					// FIXME: Check that the simulcast encodings were applied.
+					// Firefox doesn't implement `RTCRtpTransceiverInit.sendEncodings`
+					// so the only way to enable simulcast is with `RTCRtpSender.setParameters()`.
+					//
+					// This next block can be deleted when Firefox fixes bug #1396918:
+					// https://bugzilla.mozilla.org/show_bug.cgi?id=1396918
+					//
+					// NOTE: This is done in a way that is compatible with all browsers, to save on
+					// browser-conditional code. The idea comes from WebRTC Adapter.js:
+					// * https://github.com/webrtcHacks/adapter/issues/998
+					// * https://github.com/webrtcHacks/adapter/blob/v7.7.0/src/js/firefox/firefox_shim.js#L231-L255
+					if (this.configuration.simulcast) {
+						if (sendParams.encodings.length !== tcInit.sendEncodings.length) {
+							sendParams.encodings = tcInit.sendEncodings;
+
+							needSetParams = true;
+						}
+					}
+
+					if (needSetParams) {
+						OmUtil.log(`[createOffer] Setting new RTCRtpSendParameters to video sender`);
+						try {
+							await tc.sender.setParameters(sendParams);
+						} catch (error) {
+							let message = `[WebRtcPeer.createOffer] Cannot set RTCRtpSendParameters to video sender`;
+							if (error instanceof Error) {
+								message += `: ${error.message}`;
+							}
+							throw new Error(message);
+						}
+					}
+				}
+			}
+		} else {
+			// To just receive media, create new recvonly transceivers.
+			for (const kind of ['audio', 'video']) {
+				// Check if the media kind should be used.
+				if (!this.configuration.mediaConstraints[kind]) {
+					continue;
+				}
+
+				this.configuration.mediaStream = new MediaStream();
+				this.pc.addTransceiver(kind, {
+					direction: this.configuration.mode,
+					streams: [this.configuration.mediaStream]
+				});
+			}
+		}
+
+		let sdpOffer;
+		try {
+			sdpOffer = await this.pc.createOffer();
+		} catch (error) {
+			let message = `[WebRtcPeer.createOffer] Browser failed creating an SDP Offer`;
+			if (error instanceof Error) {
+				message += `: ${error.message}`;
+			}
+			throw new Error(message);
+		}
+
+		return sdpOffer;
+	}
+
+	/**
+	 * Creates an SDP answer from the local RTCPeerConnection to send to the other peer
+	 * Only if the negotiation was initiated by the other peer
+	 */
+	createAnswer() {
+		return new Promise((resolve, reject) => {
+			// TODO: Delete this conditional when all supported browsers are
+			// modern enough to implement the Transceiver methods.
+			if ('getTransceivers' in this.pc) {
+				OmUtil.log('[createAnswer] Method RTCPeerConnection.getTransceivers() is available; using it');
+
+				// Ensure that the PeerConnection already contains one Transceiver
+				// for each kind of media.
+				// The Transceivers should have been already created internally by
+				// the PC itself, when `pc.setRemoteDescription(sdpOffer)` was called.
+
+				for (const kind of ['audio', 'video']) {
+					// Check if the media kind should be used.
+					if (!this.configuration.mediaConstraints[kind]) {
+						continue;
+					}
+
+					let tc = this.pc.getTransceivers().find((tc) => tc.receiver.track.kind === kind);
+
+					if (tc) {
+						// Enforce our desired direction.
+						tc.direction = this.configuration.mode;
+					} else {
+						return reject(new Error(`${kind} requested, but no transceiver was created from remote description`));
+					}
+				}
+
+				this.pc
+					.createAnswer()
+					.then((sdpAnswer) => resolve(sdpAnswer))
+					.catch((error) => reject(error));
+			} else {
+				// TODO: Delete else branch when all supported browsers are
+				// modern enough to implement the Transceiver methods
+
+				let offerAudio,
+					offerVideo = true;
+				if (!!this.configuration.mediaConstraints) {
+					offerAudio =
+						typeof this.configuration.mediaConstraints.audio === 'boolean' ? this.configuration.mediaConstraints.audio : true;
+					offerVideo =
+						typeof this.configuration.mediaConstraints.video === 'boolean' ? this.configuration.mediaConstraints.video : true;
+					const constraints = {
+						offerToReceiveAudio: offerAudio,
+						offerToReceiveVideo: offerVideo
+					};
+					(this.pc).createAnswer(constraints)
+						.then((sdpAnswer) => resolve(sdpAnswer))
+						.catch((error) => reject(error));
+				}
+			}
+
+			// else, there is nothing to do; the legacy createAnswer() options do
+			// not offer any control over which tracks are included in the answer.
+		});
+	}
+
+	/**
+	 * This peer initiated negotiation. Step 1/4 of SDP offer-answer protocol
+	 */
+	processLocalOffer(offer) {
+		return new Promise((resolve, reject) => {
+			this.pc
+				.setLocalDescription(offer)
+				.then(() => {
+					const localDescription = this.pc.localDescription;
+					if (!!localDescription) {
+						OmUtil.log('Local description set', localDescription.sdp);
+						return resolve();
+					} else {
+						return reject('Local description is not defined');
+					}
+				})
+				.catch((error) => reject(error));
+		});
+	}
+
+	/**
+	 * Other peer initiated negotiation. Step 2/4 of SDP offer-answer protocol
+	 */
+	processRemoteOffer(sdpOffer) {
+		return new Promise((resolve, reject) => {
+			const offer = {
+				type: 'offer',
+				sdp: sdpOffer
+			};
+			OmUtil.log('SDP offer received, setting remote description', offer);
+
+			if (this.pc.signalingState === 'closed') {
+				return reject('RTCPeerConnection is closed when trying to set remote description');
+			}
+			this.setRemoteDescription(offer)
+				.then(() => resolve())
+				.catch((error) => reject(error));
+		});
+	}
+
+	/**
+	 * Other peer initiated negotiation. Step 3/4 of SDP offer-answer protocol
+	 */
+	processLocalAnswer(answer) {
+		return new Promise((resolve, reject) => {
+			OmUtil.log('SDP answer created, setting local description');
+			if (this.pc.signalingState === 'closed') {
+				return reject('RTCPeerConnection is closed when trying to set local description');
+			}
+			this.pc
+				.setLocalDescription(answer)
+				.then(() => resolve())
+				.catch((error) => reject(error));
+		});
+	}
+
+	/**
+	 * This peer initiated negotiation. Step 4/4 of SDP offer-answer protocol
+	 */
+	processRemoteAnswer(sdpAnswer) {
+		return new Promise((resolve, reject) => {
+			const answer = {
+				type: 'answer',
+				sdp: sdpAnswer
+			};
+			OmUtil.log('SDP answer received, setting remote description');
+
+			if (this.pc.signalingState === 'closed') {
+				return reject('RTCPeerConnection is closed when trying to set remote description');
+			}
+			this.setRemoteDescription(answer)
+				.then(() => {
+					resolve();
+				})
+				.catch((error) => reject(error));
+		});
+	}
+
+	/**
+	 * @hidden
+	 */
+	async setRemoteDescription(sdp) {
+		return this.pc.setRemoteDescription(sdp);
+	}
+
+	/**
+	 * Callback function invoked when an ICE candidate is received
+	 */
+	addIceCandidate(iceCandidate) {
+		return new Promise((resolve, reject) => {
+			OmUtil.log('Remote ICE candidate received', iceCandidate);
+			this.remoteCandidatesQueue.push(iceCandidate);
+			switch (this.pc.signalingState) {
+				case 'closed':
+					reject(new Error('PeerConnection object is closed'));
+					break;
+				case 'stable':
+					if (!!this.pc.remoteDescription) {
+						this.pc
+							.addIceCandidate(iceCandidate)
+							.then(() => resolve())
+							.catch((error) => reject(error));
+					} else {
+						this.iceCandidateList.push(iceCandidate);
+						resolve();
+					}
+					break;
+				default:
+					this.iceCandidateList.push(iceCandidate);
+					resolve();
+			}
+		});
+	}
+
+	addIceConnectionStateChangeListener(otherId) {
+		if (!this._iceConnectionStateChangeListener) {
+			this._iceConnectionStateChangeListener = () => {
+				const iceConnectionState = this.pc.iceConnectionState;
+				switch (iceConnectionState) {
+					case 'disconnected':
+						// Possible network disconnection
+						const msg1 =
+							'IceConnectionState of RTCPeerConnection ' +
+							this.configuration.id +
+							' (' +
+							otherId +
+							') change to "disconnected". Possible network disconnection';
+						logger.warn(msg1);
+						this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_DISCONNECTED, msg1);
+						break;
+					case 'failed':
+						const msg2 = 'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') to "failed"';
+						logger.error(msg2);
+						this.configuration.onIceConnectionStateException(ExceptionEventName.ICE_CONNECTION_FAILED, msg2);
+						break;
+					case 'closed':
+						OmUtil.log(
+							'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "closed"'
+						);
+						break;
+					case 'new':
+						OmUtil.log('IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "new"');
+						break;
+					case 'checking':
+						logger.log(
+							'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "checking"'
+						);
+						break;
+					case 'connected':
+						logger.log(
+							'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "connected"'
+						);
+						break;
+					case 'completed':
+						logger.log(
+							'IceConnectionState of RTCPeerConnection ' + this.configuration.id + ' (' + otherId + ') change to "completed"'
+						);
+						break;
+				}
+			};
+		}
+		this.pc.addEventListener('iceconnectionstatechange', this._iceConnectionStateChangeListener);
+	}
+
+	/**
+	 * @hidden
+	 */
+	generateUniqueId() {
+		return uuidv4();
+	}
+
+	get stream() {
+		return this.pc.getLocalStreams()[0] || this.pc.getRemoteStreams()[0];
+	}
+
+	// LEGACY code
+	deprecatedPeerConnectionTrackApi() {
+		for (const track of this.configuration.mediaStream.getTracks()) {
+			this.pc.addTrack(track, this.configuration.mediaStream);
+		}
+	}
+
+	// DEPRECATED LEGACY METHOD: Old WebRTC versions don't implement
+	// Transceivers, and instead depend on the deprecated
+	// "offerToReceiveAudio" and "offerToReceiveVideo".
+	createOfferLegacy() {
+		if (!!this.configuration.mediaStream) {
+			this.deprecatedPeerConnectionTrackApi();
+		}
+
+		const hasAudio = this.configuration.mediaConstraints.audio;
+		const hasVideo = this.configuration.mediaConstraints.video;
+
+		const options = {
+			offerToReceiveAudio: this.configuration.mode !== 'sendonly' && hasAudio,
+			offerToReceiveVideo: this.configuration.mode !== 'sendonly' && hasVideo
+		};
+
+		OmUtil.log('[createOfferLegacy] RTCPeerConnection.createOffer() options:', JSON.stringify(options));
+
+		return this.pc.createOffer(options);
+	}
+}
+
+class WebRtcPeerRecvonly extends WebRtcPeer {
+	constructor(configuration) {
+		configuration.mode = 'recvonly';
+		super(configuration);
+	}
+};
+
+class WebRtcPeerSendonly extends WebRtcPeer {
+	constructor(configuration) {
+		configuration.mode = 'sendonly';
+		super(configuration);
+	}
+};
+
+class WebRtcPeerSendrecv extends WebRtcPeer {
+	constructor(configuration) {
+		configuration.mode = 'sendrecv';
+		super(configuration);
+	}
+};
+
+module.exports = {
+	WebRtcPeerRecvonly: WebRtcPeerRecvonly,
+	WebRtcPeerSendonly: WebRtcPeerSendonly
+};
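
On the answering side the same object is reused: the SDP answer and trickled ICE candidates coming back from the media server are fed through the promise API, as _processSdpAnswer()/_processIceCandidate() do in video.js above. A condensed sketch (rtcPeer is the WebRtcPeerSendonly/Recvonly instance created earlier, vid and OmUtil as in the surrounding code):

	rtcPeer.processRemoteAnswer(sdpAnswer)
		.then(() => {
			// 'stream' getter: local stream for send-only peers, remote stream for receive-only ones
			vid[0].srcObject = rtcPeer.stream;
		})
		.catch(error => OmUtil.error(error, true));
	rtcPeer.addIceCandidate(candidate)
		.catch(error => OmUtil.error('Error adding candidate: ' + error, true));
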
diff --git a/openmeetings-web/src/main/front/settings/src/index.js b/openmeetings-web/src/main/front/settings/src/index.js
index 982236461..7edcb067f 100644
--- a/openmeetings-web/src/main/front/settings/src/index.js
+++ b/openmeetings-web/src/main/front/settings/src/index.js
@@ -1,5 +1,8 @@
 /* Licensed under the Apache License, Version 2.0 (the "License") http://www.apache.org/licenses/LICENSE-2.0 */
 const VideoUtil = require('./video-util');
+require('webrtc-adapter');
+const {v4: uuidv4} = require('uuid');
+const {WebRtcPeerRecvonly, WebRtcPeerSendonly} = require('./WebRtcPeer');
 
 if (window.hasOwnProperty('isSecureContext') === false) {
 	window.isSecureContext = window.location.protocol == 'https:' || ["localhost", "127.0.0.1"].indexOf(window.location.hostname) !== -1;
@@ -10,9 +13,9 @@ Object.assign(window, {
 	, VIDWIN_SEL: VideoUtil.VIDWIN_SEL
 	, VID_SEL: VideoUtil.VID_SEL
 	, MicLevel: require('./mic-level')
+	, WebRtcPeerRecvonly: WebRtcPeerRecvonly
+	, WebRtcPeerSendonly: WebRtcPeerSendonly
 	, VideoSettings: require('./settings')
 
-	// AdapterJS is not added for now
-	, kurentoUtils: require('kurento-utils')
-	, uuidv4: require('uuid/v4')
+	, uuidv4: uuidv4
 });
diff --git a/openmeetings-web/src/main/front/settings/src/mic-level.js b/openmeetings-web/src/main/front/settings/src/mic-level.js
index 32668f69b..9fb3edc00 100644
--- a/openmeetings-web/src/main/front/settings/src/mic-level.js
+++ b/openmeetings-web/src/main/front/settings/src/mic-level.js
@@ -5,11 +5,7 @@ module.exports = class MicLevel {
 	constructor() {
 		let ctx, mic, analyser, vol = .0, vals = new RingBuffer(100);
 
-		this.meterPeer = (rtcPeer, cnvs, _micActivity, _error, connectAudio) => {
-			if (!rtcPeer || ('function' !== typeof(rtcPeer.getLocalStream) && 'function' !== typeof(rtcPeer.getRemoteStream))) {
-				return;
-			}
-			const stream = rtcPeer.getLocalStream() || rtcPeer.getRemoteStream();
+		this.meterStream = (stream, cnvs, _micActivity, _error, connectAudio) => {
 			if (!stream || stream.getAudioTracks().length < 1) {
 				return;
 			}
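
meterPeer() is gone: MicLevel no longer reaches into the peer object and is instead fed either a MediaStream or an AnalyserNode directly. A minimal sketch of the two entry points, assuming `stream`, `lm` (jQuery-wrapped canvas) and OmUtil as in the surrounding code:

	const MicLevel = require('./mic-level');
	const level = new MicLevel();
	// settings dialog: feed the captured stream
	level.meterStream(stream, lm, () => {}, OmUtil.error, false);
	// room video: feed an already-created AnalyserNode instead (see video.js above)
	// level.meter(analyser, lm, micActivity, OmUtil.error);
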
diff --git a/openmeetings-web/src/main/front/settings/src/settings.js b/openmeetings-web/src/main/front/settings/src/settings.js
index 633eaf61e..6c16d4cc3 100644
--- a/openmeetings-web/src/main/front/settings/src/settings.js
+++ b/openmeetings-web/src/main/front/settings/src/settings.js
@@ -1,7 +1,6 @@
 /* Licensed under the Apache License, Version 2.0 (the "License") http://www.apache.org/licenses/LICENSE-2.0 */
 const MicLevel = require('./mic-level');
 const VideoUtil = require('./video-util');
-const kurentoUtils = require('kurento-utils');
 
 const DEV_AUDIO = 'audioinput'
 	, DEV_VIDEO = 'videoinput'
@@ -149,7 +148,7 @@ function _setCntsDimensions(cnts) {
 //each bool OR https://developer.mozilla.org/en-US/docs/Web/API/MediaTrackConstraints
 // min/ideal/max/exact/mandatory can also be used
 function _constraints(sd, callback) {
-	_getDevConstraints(function(devCnts){
+	_getDevConstraints(function(devCnts) {
 		const cnts = {};
 		if (devCnts.video && false === o.audioOnly && VideoUtil.hasCam(sd) && s.video.cam > -1) {
 			cnts.video = {
@@ -202,39 +201,32 @@ function _readValues(msg, func) {
 	_constraints(null, function(cnts) {
 		if (cnts.video !== false || cnts.audio !== false) {
 			const options = VideoUtil.addIceServers({
-				localVideo: vid[0]
-				, mediaConstraints: cnts
+				mediaConstraints: cnts
+				, onIceCandidate: _onIceCandidate
 			}, msg);
-			rtcPeer = new kurentoUtils.WebRtcPeer.WebRtcPeerSendonly(
-				options
-				, function(error) {
-					if (error) {
-						if (true === rtcPeer.cleaned) {
-							return;
-						}
-						return OmUtil.error(error);
-					}
+			navigator.mediaDevices.getUserMedia(cnts)
+				.then(stream => {
+					vid[0].srcObject = stream;
+					options.mediaStream = stream;
+
+					rtcPeer = new WebRtcPeerSendonly(options);
 					if (cnts.audio) {
 						lm.show();
 						level = new MicLevel();
-						level.meterPeer(rtcPeer, lm, function(){}, OmUtil.error, false);
+						level.meterStream(stream, lm, function(){}, OmUtil.error, false);
 					} else {
 						lm.hide();
 					}
-					rtcPeer.generateOffer(function(error, _offerSdp) {
-						if (error) {
-							if (true === rtcPeer.cleaned) {
-								return;
-							}
-							return OmUtil.error('Error generating the offer');
-						}
-						if (typeof(func) === 'function') {
-							func(_offerSdp, cnts);
-						} else {
-							_allowRec(true);
-						}
-					});
-				});
+					return rtcPeer.createOffer();
+				})
+				.then(sdpOffer => {
+					rtcPeer.processLocalOffer(sdpOffer);
+					if (typeof(func) === 'function') {
+						func(sdpOffer.sdp, cnts);
+					} else {
+						_allowRec(true);
+					}
+				}).catch(_ => OmUtil.error('Error generating the offer'));
 		}
 		if (!msg) {
 			_updateRec();
@@ -384,75 +376,49 @@ function _onKMessage(m) {
 					, video: cnts.video !== false
 					, audio: cnts.audio !== false
 				}, MsgBase);
-				rtcPeer.on('icecandidate', _onIceCandidate);
 			});
 			break;
-		case 'canPlay':
-			{
-				const options = VideoUtil.addIceServers({
-					remoteVideo: vid[0]
-					, mediaConstraints: {audio: true, video: true}
-					, onicecandidate: _onIceCandidate
-				}, m);
-				_clear();
-				rtcPeer = new kurentoUtils.WebRtcPeer.WebRtcPeerRecvonly(
-					options
-					, function(error) {
-						if (error) {
-							if (true === rtcPeer.cleaned) {
-								return;
-							}
-							return OmUtil.error(error);
-						}
-						rtcPeer.generateOffer(function(error, offerSdp) {
-							if (error) {
-								if (true === rtcPeer.cleaned) {
-									return;
-								}
-								return OmUtil.error('Error generating the offer');
-							}
-							OmUtil.sendMessage({
-								id : 'play'
-								, sdpOffer: offerSdp
-							}, MsgBase);
-						});
-					});
-				}
+		case 'canPlay': {
+			const options = VideoUtil.addIceServers({
+				mediaConstraints: {audio: true, video: true}
+				, onIceCandidate: _onIceCandidate
+			}, m);
+			_clear();
+			rtcPeer = new WebRtcPeerRecvonly(options);
+			rtcPeer.createOffer()
+				.then(sdpOffer => {
+					rtcPeer.processLocalOffer(sdpOffer);
+					OmUtil.sendMessage({
+						id : 'play'
+						, sdpOffer: sdpOffer.sdp
+					}, MsgBase);
+				})
+				.catch(_ => OmUtil.error('Error generating the offer'));
+			}
 			break;
 		case 'playResponse':
 			OmUtil.log('Play SDP answer received from server. Processing ...');
-			rtcPeer.processAnswer(m.sdpAnswer, function(error) {
-				if (error) {
-					if (true === rtcPeer.cleaned) {
-						return;
-					}
-					return OmUtil.error(error);
-				}
-				lm.show();
-				level = new MicLevel();
-				level.meterPeer(rtcPeer, lm, function(){}, OmUtil.error, true);
-			});
+
+			rtcPeer.processRemoteAnswer(m.sdpAnswer)
+				.then(() => {
+					const stream = rtcPeer.stream;
+					if (stream) {
+						vid[0].srcObject = stream;
+						lm.show();
+						level = new MicLevel();
+						level.meterStream(stream, lm, function(){}, OmUtil.error, true);
+					};
+				})
+				.catch(error => OmUtil.error(error));
 			break;
 		case 'startResponse':
 			OmUtil.log('SDP answer received from server. Processing ...');
-			rtcPeer.processAnswer(m.sdpAnswer, function(error) {
-				if (error) {
-					if (true === rtcPeer.cleaned) {
-						return;
-					}
-					return OmUtil.error(error);
-				}
-			});
+			rtcPeer.processRemoteAnswer(m.sdpAnswer)
+				.catch(error => OmUtil.error(error));
 			break;
 		case 'iceCandidate':
-			rtcPeer.addIceCandidate(m.candidate, function(error) {
-				if (error) {
-					if (true === rtcPeer.cleaned) {
-						return;
-					}
-					return OmUtil.error('Error adding candidate: ' + error);
-				}
-			});
+			rtcPeer.addIceCandidate(m.candidate)
+				.catch(error => OmUtil.error('Error adding candidate: ' + error));
 			break;
 		case 'recording':
 			timer.show().find('.time').text(m.time);
diff --git a/openmeetings-web/src/main/front/settings/src/video-util.js b/openmeetings-web/src/main/front/settings/src/video-util.js
index 2d9c28c3f..d13f1e5c9 100644
--- a/openmeetings-web/src/main/front/settings/src/video-util.js
+++ b/openmeetings-web/src/main/front/settings/src/video-util.js
@@ -184,11 +184,10 @@ function _cleanStream(stream) {
 		stream.getTracks().forEach(track => track.stop());
 	}
 }
-function _cleanPeer(peer) {
-	if (!!peer) {
-		peer.cleaned = true;
+function _cleanPeer(rtcPeer) {
+	if (!!rtcPeer) {
 		try {
-			const pc = peer.peerConnection;
+			const pc = rtcPeer.pc;
 			if (!!pc) {
 				pc.getSenders().forEach(sender => {
 					try {
@@ -208,22 +207,8 @@ function _cleanPeer(peer) {
 						OmUtil.log('Failed to clean receiver' + e);
 					}
 				});
-				pc.onconnectionstatechange = null;
-				pc.ontrack = null;
-				pc.onremovetrack = null;
-				pc.onremovestream = null;
-				pc.onicecandidate = null;
-				pc.oniceconnectionstatechange = null;
-				pc.onsignalingstatechange = null;
-				pc.onicegatheringstatechange = null;
-				pc.onnegotiationneeded = null;
 			}
-			peer.dispose();
-			peer.removeAllListeners('icecandidate');
-			delete peer.generateOffer;
-			delete peer.processAnswer;
-			delete peer.processOffer;
-			delete peer.addIceCandidate;
+			rtcPeer.dispose();
 		} catch(e) {
 			//no-op
 		}
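
[Editorial note] The slimmed-down _cleanPeer above no longer detaches individual RTCPeerConnection event handlers; it stops the senders/receivers and then relies on rtcPeer.dispose(). Purely as an illustration (the class name SketchPeer is invented here and this is not taken from the new WebRtcPeer.js), a dispose() that would make this sufficient could look roughly like:

	// Illustrative sketch only -- not the actual WebRtcPeer.js implementation.
	class SketchPeer {
		constructor(pc, stream) {
			this.pc = pc;          // underlying RTCPeerConnection
			this.stream = stream;  // local MediaStream, if any
		}
		dispose() {
			if (this.pc) {
				this.pc.close();   // ends ICE and media processing on the connection
				this.pc = null;
			}
			if (this.stream) {
				this.stream.getTracks().forEach(track => track.stop()); // release camera/microphone
				this.stream = null;
			}
		}
	}
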


[openmeetings] 02/03: [OPENMEETINGS-2732] tinyify, ua-parser-js, terser

Posted by so...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

solomax pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/openmeetings.git

commit ffef66bcfd000887aa845c3019e3f3e9bb50d669
Author: Maxim Solodovnik <so...@gmail.com>
AuthorDate: Thu Dec 8 15:32:17 2022 +0700

    [OPENMEETINGS-2732] tinyify, ua-parser-js, terser
---
 openmeetings-web/src/main/front/chat/package.json     | 2 +-
 openmeetings-web/src/main/front/main/package.json     | 4 ++--
 openmeetings-web/src/main/front/room/package.json     | 2 +-
 openmeetings-web/src/main/front/settings/package.json | 2 +-
 openmeetings-web/src/main/front/wb/package.json       | 2 +-
 5 files changed, 6 insertions(+), 6 deletions(-)
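
[Editorial note] For context on the version constraints changed below: a caret range permits updates that keep the left-most non-zero version number, so "^4.0.0" resolves to >=4.0.0 <5.0.0 and "^1.0.32" to >=1.0.32 <2.0.0. The tinyify bump therefore moves to a new major line, while the ua-parser-js and terser bumps stay within their existing majors.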

diff --git a/openmeetings-web/src/main/front/chat/package.json b/openmeetings-web/src/main/front/chat/package.json
index 124eb4746..cbe5592e1 100644
--- a/openmeetings-web/src/main/front/chat/package.json
+++ b/openmeetings-web/src/main/front/chat/package.json
@@ -13,6 +13,6 @@
   "rat-license": "Licensed under the Apache License, Version 2.0 (the \"License\") http://www.apache.org/licenses/LICENSE-2.0",
   "devDependencies": {
     "browserify": "^17.0.0",
-    "tinyify": "^3.1.0"
+    "tinyify": "^4.0.0"
   }
 }
diff --git a/openmeetings-web/src/main/front/main/package.json b/openmeetings-web/src/main/front/main/package.json
index f56e781fc..372d262b1 100644
--- a/openmeetings-web/src/main/front/main/package.json
+++ b/openmeetings-web/src/main/front/main/package.json
@@ -13,9 +13,9 @@
   "rat-license": "Licensed under the Apache License, Version 2.0 (the \"License\") http://www.apache.org/licenses/LICENSE-2.0",
   "devDependencies": {
     "browserify": "^17.0.0",
-    "tinyify": "^3.1.0"
+    "tinyify": "^4.0.0"
   },
   "dependencies": {
-    "ua-parser-js": "^1.0.2"
+    "ua-parser-js": "^1.0.32"
   }
 }
diff --git a/openmeetings-web/src/main/front/room/package.json b/openmeetings-web/src/main/front/room/package.json
index 057126848..dbaf2f2d4 100644
--- a/openmeetings-web/src/main/front/room/package.json
+++ b/openmeetings-web/src/main/front/room/package.json
@@ -13,7 +13,7 @@
   "rat-license": "Licensed under the Apache License, Version 2.0 (the \"License\") http://www.apache.org/licenses/LICENSE-2.0",
   "devDependencies": {
     "browserify": "^17.0.0",
-    "tinyify": "^3.1.0"
+    "tinyify": "^4.0.0"
   },
   "dependencies": {
     "nosleep.js": "^0.12.0"
diff --git a/openmeetings-web/src/main/front/settings/package.json b/openmeetings-web/src/main/front/settings/package.json
index 8437ecc37..a3a075c82 100644
--- a/openmeetings-web/src/main/front/settings/package.json
+++ b/openmeetings-web/src/main/front/settings/package.json
@@ -13,7 +13,7 @@
   "rat-license": "Licensed under the Apache License, Version 2.0 (the \"License\") http://www.apache.org/licenses/LICENSE-2.0",
   "devDependencies": {
     "browserify": "^17.0.0",
-    "tinyify": "^3.1.0"
+    "tinyify": "^4.0.0"
   },
   "dependencies": {
     "freeice": "2.2.2",
diff --git a/openmeetings-web/src/main/front/wb/package.json b/openmeetings-web/src/main/front/wb/package.json
index 3b8787154..3e7b33e4f 100644
--- a/openmeetings-web/src/main/front/wb/package.json
+++ b/openmeetings-web/src/main/front/wb/package.json
@@ -14,7 +14,7 @@
   "devDependencies": {
     "browserify": "^17.0.0",
     "esmify": "^2.1.1",
-    "terser": "^5.15.0"
+    "terser": "^5.16.1"
   },
   "dependencies": {
     "fabric": "^5.2.4",