<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>UgoTrade &#187; sensor networks</title>
	<atom:link href="https://www.ugotrade.com/tag/sensor-networks/feed/" rel="self" type="application/rss+xml" />
	<link>https://www.ugotrade.com</link>
	<description>Augmented Realities at the Edge of the Network</description>
	<lastBuildDate>Wed, 25 May 2016 15:59:56 +0000</lastBuildDate>
	<language>en-US</language>
		<sy:updatePeriod>hourly</sy:updatePeriod>
		<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=3.9.40</generator>
	<item>
		<title>Toward the Sentient City: The Future of the Outernet and How to Imagine it?</title>
		<link>https://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/</link>
		<comments>https://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/#comments</comments>
		<pubDate>Mon, 09 Nov 2009 21:09:00 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[3rd cloud]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[aesthetics of distributed participation]]></category>
		<category><![CDATA[Amphibious Architecture]]></category>
		<category><![CDATA[architectures of participation]]></category>
		<category><![CDATA[asynchronous city]]></category>
		<category><![CDATA[Benjamin H. Bratton]]></category>
		<category><![CDATA[Breakout!]]></category>
		<category><![CDATA[Conflux 2009]]></category>
		<category><![CDATA[Dan Hill]]></category>
		<category><![CDATA[Dharma Dailey]]></category>
		<category><![CDATA[distributed open AR]]></category>
		<category><![CDATA[Enrique Ramirez]]></category>
		<category><![CDATA[everyware]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[human electric hybrid]]></category>
		<category><![CDATA[hybrid social netoworks]]></category>
		<category><![CDATA[julian Bleeker]]></category>
		<category><![CDATA[Laura Forlano]]></category>
		<category><![CDATA[location aware applications]]></category>
		<category><![CDATA[Mark Shepard]]></category>
		<category><![CDATA[Martijn de Waal]]></category>
		<category><![CDATA[Matthew Fuller]]></category>
		<category><![CDATA[Mimi Zeiger]]></category>
		<category><![CDATA[Natalie Jeremijenko]]></category>
		<category><![CDATA[Natural Fuse]]></category>
		<category><![CDATA[new architectures of participation]]></category>
		<category><![CDATA[Nicolas Nova]]></category>
		<category><![CDATA[Omar Khan]]></category>
		<category><![CDATA[Open AR]]></category>
		<category><![CDATA[outernet]]></category>
		<category><![CDATA[Philip Beesley]]></category>
		<category><![CDATA[real time communication]]></category>
		<category><![CDATA[real time web]]></category>
		<category><![CDATA[real-time database enable city]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Sentient City Survival Kit]]></category>
		<category><![CDATA[Situated Technologies]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[social mobility]]></category>
		<category><![CDATA[social mobility and the 3rd cloud]]></category>
		<category><![CDATA[synchronous internet of things]]></category>
		<category><![CDATA[The Copenhagen Wheel]]></category>
		<category><![CDATA[The Living Architecture Lab]]></category>
		<category><![CDATA[the social negotiation of Technology]]></category>
		<category><![CDATA[Too Smart City]]></category>
		<category><![CDATA[Toward the Sentient City]]></category>
		<category><![CDATA[Trash Track]]></category>
		<category><![CDATA[urban sustainability]]></category>
		<category><![CDATA[urbanware]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Web Squared]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4758</guid>
		<description><![CDATA[Amphibious Architecture &#8211; &#8220;submerges ubiquitous computing into the water&#8212;that 90% of the Earth&#8217;s inhabitable volume that envelops New York City but remains under-explored and under-engaged.&#8221; Toward the Sentient City, brought &#8220;architects and urban designers into a conversation that until now has been limited largely to technologists,&#8221; and created an extraordinary opportunity to investigate distributed architectures [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank"><span id="n.6p" title="Click to view full content"> </span></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-12.03.40-AM.png"><img class="alignnone size-medium wp-image-4783" title="Screen shot 2009-11-06 at 12.03.40 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-12.03.40-AM-300x200.png" alt="Screen shot 2009-11-06 at 12.03.40 AM" width="300" height="200" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/dhj5mk2g_404g3prc6dc_b.jpg"><img class="alignnone size-medium wp-image-4759" title="dhj5mk2g_404g3prc6dc_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/dhj5mk2g_404g3prc6dc_b-300x199.jpg" alt="dhj5mk2g_404g3prc6dc_b" width="300" height="199" /></a><br />
<span id="ot:x" title="Click to view full content"> </span></p>
<p><em><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"><span id="it_d" title="Click to view full content">Amphibious </span>Architecture</a> &#8211; &#8220;submerges ubiquitous computing into the water&#8212;that 90% of the Earth&#8217;s inhabitable volume that envelops New York City but remains under-explored and under-engaged.&#8221;</em></p>
<p><a href="http://www.sentientcity.net/exhibit/">Toward the Sentient City</a>,<span id="ju31" title="Click to view full content"> brought </span> &#8220;architects and urban designers into a conversation that until now has been limited largely to technologists,â€ and <span id="hb:z" title="Click to view full content">created an extraordinary opportunity to investigate distributed architectures of participation of what we might call the &#8220;outernet.&#8221;Â  This is a</span><span id="hb:z" title="Click to view full content"> timely conversation as &#8220;web squared,&#8221;Â  &#8220;smart things,&#8221; the &#8220;internet of things,&#8221; or the &#8220;outernet,&#8221;</span><span id="g6ad" title="Click to view full content"> and their popular &#8220;ambassador&#8221; augmented reality are rapidly becoming everyone&#8217;s &#8220;business.&#8221;</span><span id="eb9y" title="Click to view full content"> From </span><span id="b265" title="Click to view full content">&#8220;evil&#8221; marketers, to global corporations, </span><span id="sq48" title="Click to view full content">environmentalists, artists and community activists -Â  everyone, it seems, is</span><span id="mqn_" title="Click to view full content"> interested in the possibilities of this new frontier.</span></p>
<p><span id="ot:x" title="Click to view full content">It is a challenging task to respond to, </span><a href="http://www.sentientcity.net/exhibit/">Toward the Sentient City</a><span id="ot:x" title="Click to view full content">, an exhibition whose backdrop includes a series of conversations on Situated Technologies &#8211; published by the Architectural League, from a circle of people who have been thinking, writing, and speaking on networked urbanism for many years now, including: Adam Greenfield, </span><span id="vjks" title="Click to view full content"> Mark Shepard, Matthew Fuller, Usman Haque, Benjamin H. Bratton, Natalie JeremiJenko, Laura Forlano, Dharma Dailey,Â  Philip Beesley, Omar Khan, Julian Bleeker, Nicolas Nova</span><span id="o7yp" title="Click to view full content">.Â  And the exhibition itself has a very thoughtful group of respondents, see posts from: <a href="http://www.sentientcity.net/exhibit/?p=595" target="_blank">Dan Hill</a>, <a href="http://www.sentientcity.net/exhibit/?p=659" target="_blank">Martijn de Waal,</a> <a href="http://www.sentientcity.net/exhibit/?p=622" target="_blank">Enrique Ramirez</a>, and <a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank">Mimi Zeiger.</a></span><a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank"><span id="n.6p" title="Click to view full content"> </span></a></p>
<p>But one ofÂ  Toward the Sentient City&#8217;s key accomplishments was to go beyond the rhetorical, and to put practical examples out into the world to<span id="ijgh" title="Click to view full content"> organize a discussion on some of the ideas and possibilities of ubiquitous computing that have barely begun to emerge from academic research, and entrepreneurial blue skying.Â  As curator, </span><a href="http://www.andinc.org/v3/" target="_blank">Mark Shepard</a><span id="ijgh" title="Click to view full content">, explained:<br />
</span></p>
<p><strong><span id="fqkh" title="Click to view full content">&#8220;The </span></strong><strong><span id="tq6_" title="Click to view full content"><span>aim is to provide concrete examples in the present around which to organize a discussion about just what kind of future we might want. Whether they&#8217;re prototypes or not, these commissions are concrete examples. They&#8217;re not abstract ideas. And we can go stand next to each other and look at and interact with something which is out there in the world behaving in the way it behaves, performing as it does, and we can then begin to have a discussion about it that is less dependent upon powers of rhetoric.</span> So it&#8217;s not about me persuading you about an idea but it&#8217;s about us evaluating something that&#8217;s living and existing in this world. And that was really the intention of the show.&#8221;</span></strong></p>
<p><span id="ijgh" title="Click to view full content">The commissioned works </span><span id="d4-:" title="Click to view full content">-<a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"> Amphibious Arc</a></span><span id="d4-:" title="Click to view full content"><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">hitecture</a>, <a href="http://www.sentientcity.net/exhibit/?p=53" target="_blank">Breakout!</a>, <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>, <a href="http://www.sentientcity.net/exhibit/?p=59" target="_blank">Too Smart City</a>, and <a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank">TrashTrack,</a> </span><span id="xnxp" title="Click to view full content">that were the hub of Toward the Sentient City&#8217;s </span><span id="g.08" title="Click to view full content"> events, themes and texts, provided a unique glimpse</span><span id="j-jh" title="Click to view full content"> at </span><span id="pa9i" title="Click to view full content">some of the possible dystopian and utopian futures of a &#8220;smart&#8221; city.Â  But, most importantly,Â  all the works questioned what might be new </span><span id="ijgh" title="Click to view full content">architectures of participation for a sentient city. </span></p>
<h3>New Architectures of Participation: Hybrid Social Networks with Human and Non-human Participants .</h3>
<p>Of the five works, Amphibious Architecture and Natural Fuse were particularly fascinating to me because they explored the possibilities of sensor networks to create new forms of distributed participation in networked ecosystems that connected the experience/trajectories of human and non human actors &#8211; fish, plants,Â  and people.</p>
<p>Both Amphibious Architecture, andÂ  &#8220;Natural Fuse&#8221; &#8211; from Usman Haque and <a href="http://www.haque.co.uk/" target="_blank">Haque Design + Research,</a> gave exhibition attendees the chance to experience at a personal level our relationships with our non-human neighbors.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"><span id="it_d" title="Click to view full content">Amphibious </span>Architecture</a> from the The Living Architecture Lab at Columbia University Graduate School of Architecture, Planning and Preservation (Directors David Benjamin and Soo-in Yang) and Natalie Jeremijenko, Environmental Health Clinic at New York University, <span id="w.m9" title="Click to view full content">used a sensor array to &#8220;pierce the reflective </span><span id="ud4u" title="Click to view full content">surface of the water&#8221; that</span> separates us from the underwater ecosystem below.Â  <span id="kfwr" title="Click to view full content">The sensor arrays just below the surface of the East River andÂ  floating light array</span> (see picture on left opening this post) create a new interface between people and fish whose movements and water quality are transmitted in light.</p>
<p>One could also SMS the fish and the single beaver that lives in the rivers surrounding NYC to find the conditions they were experiencing.<span id="cehj" title="Click to view full content"> But t</span><span id="y9m6" title="Click to view full content">urning the city&#8217;s &#8220;back stories,&#8221; like the movements of &#8220;Yo beaver,&#8221; and the oxygen levels and water quality of the rivers into &#8220;fore stories,&#8221; is only one of the many ways Natalie Jeremijenko explores how we can engender the empathy necessary for humans and non humans to live in harmony and mutual benefit.</span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/nataliefishandmicrochips.jpg"><img class="alignnone size-medium wp-image-4802" title="nataliefishandmicrochips" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/nataliefishandmicrochips-300x199.jpg" alt="nataliefishandmicrochips" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/fishfoodpost.jpg"><img class="alignnone size-medium wp-image-4803" title="fishfoodpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/fishfoodpost-300x199.jpg" alt="fishfoodpost" width="300" height="199" /></a></p>
<p><span id="y9m6" title="Click to view full content"> </span>Toward the Sentient City also held workshops/presentations in conjunction with <a href="http://confluxfestival.org/2009/" target="_blank">Conflux 2009</a>. After her Conflux presentation, Natalie Jeremijenko of Amphibious Architecture (which is also a collaborative project between <a href="http://www.environmentalhealthclinic.net/">xClinic</a>, <a href="http://www.thelivingnewyork.com/">The Living</a><span id="wz9v" title="Click to view full content">, </span>&#8220;and other intelligent creatures on the East River&#8221;)Â  invited participants to enjoy a lunch of cross-species foods at the East River site.Â  <span id="k2u." title="Click to view full content"> </span></p>
<p><span id="k2u." title="Click to view full content">The cross-species lunch takes </span><span id="x0h." title="Click to view full content"> an existing interaction pattern through which people and fish are already communicating, </span><span id="tkk5" title="Click to view full content">i.e., people going to the river â€“ the waterfront,Â  and feeding the fish</span><span id="vct4" title="Click to view full content"> Wonder Bread (which is bad for humans and fish); and transforms this desire to feed the fish into something which actually can remove the mercury content from the fish and our bodies by removing it from the food chain, so a previously inharmonious connection between people and fish, is redirected into a productive interaction benefitting both species.Â  As it turns out, food that is good for Fish (see pictures above), and removes mercury from their bodies can also be nutritious and tasty for humans. </span></p>
<p><a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>, from team members, Usman Haque, creative director, Nitipak &#8216;Dot&#8217; Samsen, designer, Ai Hasegawa, designer, Cesar Harada, designer, Barbara Jasinowicz, producer, used sensors to&#160;<span id="oenx" title="Click to view full content"> link humans and plants in a network where we are accountable for how our behavior affects others in our ecosystem. </span></p>
<p><span id="oenx" title="Click to view full content">If you brought an ordinary plant to the exhibition, you could take home an electronically assisted plant and become part of a social network of humans and plants. This network of humans and electronically assisted plants is also a carbon sink and ifÂ  more energy is consumed than the total number of plants in the social network can offset, plants begin to die giving immediate feedback and consequences to being greedy about energy consumption. </span><span id="ijgh" title="Click to view full content">For more about joining the Natural Fuse network see<a href="http://www.naturalfuse.org" target="_blank"> here.</a><br />
</span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusepres.jpg"><img title="naturalfusepres" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusepres-300x199.jpg" alt="naturalfusepres" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusetakehome.jpg"><img title="naturalfusetakehome" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusetakehome-300x199.jpg" alt="naturalfusetakehome" width="300" height="199" /></a></p>
<p><span id="pa9i" title="Click to view full content"> </span><span id="w-ed" title="Click to view full content">We are in the pre-dawn ofÂ  sensor networks like those Natural Fuse and Amphibious Architecture created &#8211; social</span><span id="n.6p" title="Click to view full content"> networksÂ  that link human and non human participants in entirely new ways are largely an uncharted territory. </span><span id="o7yp" title="Click to view full content">(Note: T</span><span id="zr9t" title="Click to view full content">he upcoming <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies</a> Pamphlet 6</span><span id="ijgh" title="Click to view full content"> &#8211; <strong>&#8220;Micro Public Places,&#8221; </strong>Marc Bohlen and Hans Frei, indicates it will continue the journey with an investigation ofÂ  &#8220;transparent and distributed participation.&#8221;)</span></p>
<h3>Where Does the Social Negotiation ofÂ  Technology Happen?</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/markshepardpost.jpg"><img class="alignnone size-medium wp-image-4825" title="markshepardpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/markshepardpost-199x300.jpg" alt="markshepardpost" width="199" height="300" /></a></p>
<p>Frequent questions that came up at the presentations given by the teams that produced the commissioned works were: Does this idea scale?Â Â  Does it close the loop in that you <span>get answers to the questions asked?Â  How does the conversation gain agency?Â  And where does the social negotiation of technology happen?Â  (These last two questions were asked by <a href="http://www.orangecone.com/" target="_blank">Mike Kuniavsky</a> at Mark Shepardâ€™s presentation at Conflux: â€œ</span><a id="ktb-" title="Sentient City Survival Kit" href="http://survival.sentientcity.net/" target="_blank"><span>Sentient City Survival Kit</span></a><span>.â€ â€“ see picture above)Â  I think it is fair to say that these questions for the most part remain unanswered. But Toward the Sentient city was alive with ideas and practical examples about ways we can explore these questions more deeply.</span></p>
<p><span id="oenx" title="Click to view full content">Usman Haque in response to the question, &#8220;Does this experiment scale?,&#8221; replied:</span></p>
<p><strong>&#8220;it would, but at an individual level because it has to remain at the individual level because it is about the individual in relationship to the wider social context as opposed to building a forest to offset a city it is about each individual making choices of their own about what they do and&#160; having some kind of knowledge about the effect they are having on other people because most of the time we are quite complacent &#8211; we are able to do whatever we want because we are not necessarily aware how our intrusions affect both human and non-human neighbors&#8230;.&#8221;</strong></p>
<p>So how does this close the loop?Â  Usman explains that one of the key aspects for him is that if you do take home a plant you become part of a system in which you are no longer anonymous and if a plant is threatened (plants get three lives) you have the opportunity to email the person in the system who has threatened your plant.Â  Usman noted that one of the interesting things that happened in the context of the exhibition, where there was a single unit, was that 90% of the time people switched it on to selfish mode &#8211; presumably because they were anonymous.Â  Another aspect of Natural Fuse that raises interesting questions is that as more people decide to join the network the risk of a plant being harmed by any particular individual&#8217;s selfishness lessens.Â  As <a href="http://www.sentientcity.net/exhibit/?p=659" target="_blank">Martijn de Waal</a>,<span id="gi2_" title="Click to view full content"> in his response that unpacks some of the deeper philosophical, epistemological, and ethical questions that Natural Fuse addresses, observes:</span></p>
<p><strong>&#8220;The concept of a commons thus assumes cooperation and mutual accommodation. Could Sentient Technology play a role in the allocation of limited resources between citizens? Could it lead to the emergence of some sort of peer-to-peer governance model, that could prevent overusage of scarce resources?&#8221;</strong></p>
<h3><strong><br />
New Aesthetics of Distributed Participation</strong></h3>
<p><span id="nqx:" title="Click to view full content">The works of, </span><span id="nqx:" title="Click to view full content"><span> &#8220;Toward the Sentient City&#8221; point to possibilities for a new aesthetics of distributed participation in which users and system are no longer separated but instead â€œdevelop joint forms of observing and knowing that neither [...] is capable on its own.â€ (quote from upcoming, <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies Pamphlets</a></span> 6: Micro Public Places, Marc Bohlen and Hans Frei).Â  Natural Fuse and Amphibious Architecture examine the new transactional realities of the Sentient City.</span></p>
<p><span id="po-s" title="Click to view full content"> But there are many questions left unanswered.&#160; We know a lot about the power of generativity from the </span>internet (see Zittrain)-&#160; the ur<strong> &#8220;architecture of participation.&#8221;</strong> <span id="hri-" title="Click to view full content">As Zittrain points out, the &#8220;generativity&#8221; of the internet is &#8220;the engine that has catapulted the internet from backwater to ubiquity.&#8221; </span> Tim O&#8217;Reilly coined the phrase, &#8220;architecture of participation,&#8221; to &#8220;describe the nature of systems that are designed for user contribution,&#8221;<span id="o7et" title="Click to view full content"> such that &#8220;participants extend the reach/increase the value of the system.&#8221;&#160; But as Tim O&#8217;Reilly put it in his recent talk, &#8220;<a href="http://www.slideshare.net/timoreilly/state-of-the-internet-operating-system" target="_blank">State of the Internet Operating System:&#8221;</a></span></p>
<p><span title="Click to view full content"><strong>&#8220;Web 2.0 is about finding meaning in user-generated data, and turning that meaning into real-time user facing services.Â  &#8220;Web Squared&#8221; takes that same concept to real-time sensor data.&#8221;</strong><br />
</span></p>
<p><span id="o7et" title="Click to view full content">We know little yet about what constitutes generativity for the &#8220;outernet,&#8221; particularly for the kind ofÂ  hybrid social networks that Natural Fuse and Amphibious Architecture present.Â  Social Networks that connect people and place, humans and non humans, challenge dichotomies of man and nature, and machine and user in new and unexpected ways.</span></p>
<p>At the moment, the internet is going through a metamorphosis with the emergence of real time technologies like XMPP, PubHubSubBub and Google Wave and the coming of age of mobile computing.Â Â  While these shifts were not investigated specifically in any of the commissioned works I think all the worksÂ  begged the question,Â  What is a common platform for social interaction in the &#8220;outernet,&#8221; or sentient city?Â  I was not entirely satisfied, from this point of view, with a web interface for Natural Fuse or SMS as a mobile interface for Amphibious Architecture.</p>
<p><a href="http://www.media.mit.edu/people/dpreed" target="_blank">David P. Reed</a> points to the relationship between social mobility what he describes as the 3rd cloudÂ  and the need for a common platform (see <a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank">David Reed &#8211; Social Mobility and the 3rd Cloud</a>. Hat tip to <a href="http://twitter.com/srenan" target="_blank">@srenan</a> for pointing me to David&#8217;s presentation).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.11.25-PM.png"><img class="alignnone size-medium wp-image-4826" title="Screen shot 2009-11-06 at 11.11.25 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.11.25-PM-300x222.png" alt="Screen shot 2009-11-06 at 11.11.25 PM" width="300" height="222" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.16.59-PM1.png"><img class="alignnone size-medium wp-image-4828" title="Screen shot 2009-11-06 at 11.16.59 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.16.59-PM1-300x222.png" alt="Screen shot 2009-11-06 at 11.16.59 PM" width="300" height="222" /></a></p>
<p><em>Slides above are from David P. Reed&#8217;s presentation,Â <a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank"> Social Mobility and the 3rd Cloud</a></em><a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank"></a></p>
<p>What is an architecture of participation for mobile, social interaction? This is something I am very interested in.</p>
<p>Recently I began a project with a small group of augmented reality developers and enthusiasts to use Google Wave Federation Protocol as a transport system for open distributed, social augmented experiences (lots more to come on this soon &#8211; you can see the back story in my posts <a href="http://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/" target="_blank">here</a> and <a href="http://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/" target="_blank">here</a>).Â  Wave has introduced an open federated architecture of participation that <strong style="font-weight: normal;">combines asynchronous &amp; synchronous data,Â  bringingÂ  together the advantages of real-time communication with the persistent hosting of collaborative data (like wikis). </strong><strong> </strong></p>
<p>Augmented Reality puts who you are, where you are, and what you are doing center stage, and is an interface for &#8220;communications embedded in context&#8221; and &#8220;enabled by identity&#8221; &#8211; two key qualities of what David <span>P. Reed calls the 3rd cloud.Â  An open, distributed framework for augmented reality could createÂ  an interconnected sense of AR, one that fuses augmentation, data overlays, and varied media with location/time/place and crucially, social networking.Â  Such an interface would open up many possibilities for the new transactional realities that could </span>integrate real-time cloud based data with a human perspective and social networking.Â  I am using the term,<span> transactional realitiesÂ  to suggest an extension into social augmented experiences ofÂ  what, Di-Ann Eisnor, </span><a id="s050" title="Platial" href="http://www.platial.com/"><span>Platial</span></a><span>, describes as,Â  &#8220;</span><span><span><span>transactional cartography&#8221; &#8211; &#8220;the movement from map providing entertainment/information to map as enabling action&#8221; (see </span><a id="h6.r" title="Human as Sensors" href="http://www.youtube.com/watch?v=Di285pgcZRE&amp;feature=PlayList&amp;p=F664D8C553A57C93&amp;index=3"><span>Human as Sensors</span></a><span>).</span></span></span></p>
<p>We have only just got a glimpse of&#160; how real time technologies and &#8220;communications embedded in context&#8221; will transform social interaction and our cities.&#160; This post on <a id="r3ow" title="Writing as Real-Time Performance" href="http://snarkmarket.com/2009/3605">Writing as Real-Time Performance</a> that looks at the Google Wave playback feature is a brilliant example of how real time technology turns familiar practices like writing inside out, and catapults us into new time trajectories. And, if you haven&#8217;t already seen Matt Jones of BERG&#8217;s, brilliant look at, <a href="http://berglondon.com/blog/2009/10/26/all-the-time-in-the-world-talk-at-design-by-fire-2009-utrecht/" target="_blank">&#8220;All the time in the world&#8221; </a>- from the &#8220;soft time&#8221; and &#8220;squishy time&#8221; of&#160; cell phone culture, to their antecedents in real-time computing, go now!&#160; Also see Dan Hill&#8217;s work on <a href="http://cityofsound.com" target="_blank">&#8220;time based notation,&#8221;</a> and Tom Carden&#8217;s work for mysociety.org</p>
<p><span> </span></p>
<h3>Transactional Realities Between the &#8220;Asynchronous City&#8221; and the &#8220;Synchronous Internet ofÂ  Things&#8221;</h3>
<p><span> </span><span id="nqbb" title="Click to view full content"><span>Out of Toward the Sentient City&#8217;s five commissioned works,</span><span> only</span></span><span id="n:_n" title="Click to view full content"><span> </span></span><span> </span><a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank"><span>Trash Track</span></a><span> </span><span id="nqbb" title="Click to view full content"></span><span> </span><span id="n:_n" title="Click to view full content"><span>focused on the &#8220;synchronized Internet of Things.&#8221; </span></span><a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank"><span id="n:_n" title="Click to view full content"><span> </span></span></a><span id="n:_n" title="Click to view full content"><span>Trash Track asks what can we learn from the aggregated data streams of &#8220;smart&#8221; trash about</span></span><span> the infamous path of trash from cities of privilege to rivers of want, rather than</span><span id="rkuc" title="Click to view full content"><span> exploring the particular transactional realities of a social network that linked people with their trash.</span></span><span id="n.6p" title="Click to view full content"> </span></p>
<p><span id="ft58" title="Click to view full content"><br />
<span> </span></span><span id="ft58" title="Click to view full content"> </span><span id="n.6p" title="Click to view full content"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2.jpg"><img title="TrashTrack2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2-300x199.jpg" alt="TrashTrack2" width="300" height="199" /></a></span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack4.jpg"><img title="trashtrack4" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack4-300x199.jpg" alt="trashtrack4" width="300" height="199" /></a><span id="ft58" title="Click to view full content"><span> </span></span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack3.jpg"><img class="alignnone size-medium wp-image-4768" title="trashtrack3" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack3-300x199.jpg" alt="trashtrack3" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrackpost.jpg"><img class="alignnone size-medium wp-image-4782" title="trashtrackpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrackpost-300x199.jpg" alt="trashtrackpost" width="300" height="199" /></a></p>
<p><span id="ft58" title="Click to view full content"><span>The goals of </span></span><span id="ft58" title="Click to view full content"><span>Trash Track </span></span><span id="ft58" title="Click to view full content"><span>were</span></span><span id="ft58" title="Click to view full content"><span>, Assaf </span></span><span id="ft58" title="Click to view full content"><span>Biderman explained during his presentation:</span></span></p>
<p><span id="ft58" title="Click to view full content"><span> <strong>&#8220;to learn about the removal chain, to see if knowing more cou</strong></span></span><strong><span id="f:mt" title="Click to view full content"><span>ld promote behavioral change, and investigate if smart tagging could one day lead to 100% recycling.&#8221; </span></span></strong></p>
<p><strong><span id="f:mt" title="Click to view full content"> </span></strong><span>The team from SENSEable City Laboratory, MIT included &#8211; Carlo Ratti: Director, Assaf Biderman: Associate Director, Rex Britter: Advisor, Stephen Miles: Advisor, Kristian Kloeckl Project Leader, Musstanser Tinauli, E Roon Kang, Alan Anderson, Avid Boustani, Natalia Duque Ciceri, Lorenzo Davolli, Samantha Earl, Lewis Girod, Sarabjit Kaur, Armin Linke, Eugenio Morello, Sarah Neilson, Giovanni de Niederhausern, Jill Passano, Renato Rinaldi, Francisca Rojas, Louis Sirota, Malima Wolf.</span></p>
<p><span>However, Assaf, in his presentation, presented another project from SENSEable City Laboratory in partnership with the City of Copenhagen, </span><a href="http://senseable.mit.edu/copenhagenwheel/" target="_blank">The Copenhagen Wheel</a>. <span>This project seems to work brilliantly at the intersection of the &#8220;asynchronous city&#8221; (Bleeker and Nova) and the &#8220;synchronized internet of things.&#8221; The &#8220;smart&#8221; wheel &#8211; a low cost, open source, human electric hybrid is:</span></p>
<p><strong>&#8220;an electric bicycle wheel that can be easily retrofitted into any regular bicycle and location and environmental sensors which are powered by the bike wheel and in turn provide data for a variety of applications.&#8221;</strong></p>
<p>This project, that aims to promote urban sustainability through smart biking, opens up many possibilities for a bottom up architecture of participation for the sentient city (<a href="http://senseable.mit.edu/copenhagenwheel/">see video here</a>). <strong><br />
</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-08-at-7.18.45-PM.png"><img class="alignnone size-medium wp-image-4838" title="Screen shot 2009-11-08 at 7.18.45 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-08-at-7.18.45-PM-300x218.png" alt="Screen shot 2009-11-08 at 7.18.45 PM" width="300" height="218" /></a><br />
</strong></p>
<p><a href="http://www.andinc.org/v3/" target="_blank">Mark Shepard</a> describes something he calls &#8220;propagativeÂ  urbanism:&#8221;</p>
<p><strong>&#8220;a way of thinking about shaping the experience of urban space in terms of a bottom-up, participatory approach to the evolution of cities.&#8221; </strong></p>
<p>And, in the most recent pamphlet in the <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies pamphlets </a><span><a href="http://www.situatedtechnologies.net/" target="_blank">series, #5 &#8220;Asynchonicity Design Fictions for Asynchronous Urban Computing,&#8221; </a>Julian Bleeker and Nicolas Nova invert an emphasis in the so-called &#8220;real-time database enabled city&#8221; with its synchronized Internet of Things&#8230;. and speculate on the existence of an &#8220;asynchronous city.&#8221; They &#8220;forecast situated technologies based on weak signals that show the importance of time on human perspectives.&#8221; They ask:</span></p>
<p><span><strong>&#8220;why, besides &#8216;operational efficiency,&#8217; would we want a ubiquitously computed environment?Â  What are the measures of &#8216;better&#8217; that we want to count as meaningful?&#8221;</strong></span></p>
<p><span>They explain:</span></p>
<p><span><strong>..we are trying to think through what &#8220;urbanwares might be &#8211; urban operating systems &#8211; if they were less about synchronization, top-down construction and connected channels of information and databases and so forth, and more about asynchronized, decentralized things.Â  Software, data, time out of alignment, incongruities, tiles and imbrications of the geographic, spatial parameters into a delicious kind of lively peasant&#8217;s stew.&#8221; </strong><br />
</span></p>
<p><span>One takeaway, perhaps, from Toward the Sentient City is that it&#8217;s at the intersection of the &#8220;asynchronous city&#8221; and the &#8220;real-time database enabled city&#8221; where many new transactional realities of the sentient city will arise.</span></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
		<item>
		<title>ISMAR 2009: An Augmented Reality &#8220;Top Chef&#8221; Coopetition</title>
		<link>https://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/</link>
		<comments>https://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/#comments</comments>
		<pubDate>Sat, 24 Oct 2009 22:26:42 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Acrossair]]></category>
		<category><![CDATA[AR Sketch]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[ARhrrr]]></category>
		<category><![CDATA[augmented reality at VW]]></category>
		<category><![CDATA[avatars and people together in physical spaces]]></category>
		<category><![CDATA[Avilus]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Chetan Damani]]></category>
		<category><![CDATA[Christine Perey]]></category>
		<category><![CDATA[cloud computing]]></category>
		<category><![CDATA[Dirk Groten]]></category>
		<category><![CDATA[distributed computing]]></category>
		<category><![CDATA[eyewear for augmented reality]]></category>
		<category><![CDATA[geoAR]]></category>
		<category><![CDATA[Georg Klein]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Green Tech AR Competition]]></category>
		<category><![CDATA[HMDs]]></category>
		<category><![CDATA[Humans as Sensors]]></category>
		<category><![CDATA[industrial augmented reality]]></category>
		<category><![CDATA[Institut Graphische Datenverarbeitung]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[ISMAR 2010]]></category>
		<category><![CDATA[ISMAR09]]></category>
		<category><![CDATA[Jay Wright]]></category>
		<category><![CDATA[Joe Ludwig]]></category>
		<category><![CDATA[Junaio]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Mark Billinghurst]]></category>
		<category><![CDATA[Markus Tripp]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Michael Goesele]]></category>
		<category><![CDATA[Microsoft and augmented reality]]></category>
		<category><![CDATA[Mobile Monday]]></category>
		<category><![CDATA[Mobilizy]]></category>
		<category><![CDATA[MoMo]]></category>
		<category><![CDATA[Noah Zerking]]></category>
		<category><![CDATA[Noora Guldemond]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[open distributed AR]]></category>
		<category><![CDATA[open hardware]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[participatory sensing]]></category>
		<category><![CDATA[Pattie Maes]]></category>
		<category><![CDATA[Peter Meier]]></category>
		<category><![CDATA[Platial]]></category>
		<category><![CDATA[PTAM on an iphone]]></category>
		<category><![CDATA[Put a Spell. Thomas Carpenter]]></category>
		<category><![CDATA[RoomWare]]></category>
		<category><![CDATA[Sean White]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[smart phones]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented realities]]></category>
		<category><![CDATA[standards for augmented reality]]></category>
		<category><![CDATA[Steven Feiner]]></category>
		<category><![CDATA[Technische Universitat Munchen]]></category>
		<category><![CDATA[The RoomWare Project]]></category>
		<category><![CDATA[The Zerkin Glove]]></category>
		<category><![CDATA[tracking and mapping in mobile augmented reality]]></category>
		<category><![CDATA[transactional cartography]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[Vernor Vinge]]></category>
		<category><![CDATA[virtual pets]]></category>
		<category><![CDATA[Volkswagen augmented reality group]]></category>
		<category><![CDATA[Vuzix]]></category>
		<category><![CDATA[Wave]]></category>
		<category><![CDATA[Wave enabled augmented reality]]></category>
		<category><![CDATA[Web 2.0 Summit]]></category>
		<category><![CDATA[Yuri van Geest]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4670</guid>
		<description><![CDATA[ISMAR 2009 -Â  was an extraordinary mix ofÂ  high geek, academic eminence, gungho Dutch Cowboy entrepreneurial spirit, German engineering and industry, brilliant artistry, and invention, all fueled by a sense, and a very active presence in the case of Diamond Sponsor &#8211; Qualcomm, that the big technology players are waking up to augmented reality. In [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MetaioLayarpost.jpg"><img class="alignnone size-medium wp-image-4674" title="Metaio&amp;Layarpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MetaioLayarpost-300x199.jpg" alt="Metaio&amp;Layarpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DirkseesDirkonJunaiopost.jpg"><img class="alignnone size-medium wp-image-4676" title="DirkseesDirkonJunaiopost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DirkseesDirkonJunaiopost-300x199.jpg" alt="DirkseesDirkonJunaiopost" width="300" height="199" /></a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dirkwatchesdirkvcupost.jpg"><img class="alignnone size-medium wp-image-4675" title="dirkwatchesdirkvcupost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dirkwatchesdirkvcupost-300x199.jpg" alt="dirkwatchesdirkvcupost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/metaiodinasaurpost.jpg"><img class="alignnone size-medium wp-image-4678" title="metaiodinasaurpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/metaiodinasaurpost-299x201.jpg" alt="metaiodinasaurpost" width="299" height="201" /></a></p>
<p><a href="http://www.ismar09.org/" target="_blank">ISMAR 2009</a> was an extraordinary mix of high geek, academic eminence, gungho Dutch Cowboy entrepreneurial spirit, German engineering and industry, brilliant artistry, and invention, all fueled by a sense, and a very active presence in the case of Diamond Sponsor &#8211; Qualcomm, that the big technology players are waking up to augmented reality.</p>
<p>In the picture sequence above (click on photos to enlarge),Â  <a href="http://twitter.com/metaioUS" target="_blank">Noora </a><span><span><a href="http://twitter.com/metaioUS" target="_blank">Guldemond</a></span></span><span><span>, <a href="http://www.metaio.com/" target="_blank">Metaio</a>, demonstrates <a href="http://www.junaio.com/" target="_blank">Junaio</a> (coming to an iphone near you Nov 2nd) to <a href="http://twitter.com/dirkgroten" target="_blank">Dirk Groten</a>, CTO of<a href="http://layar.com/" target="_blank"> Layar</a> (top left photo).Â  One of the nice social features of Junaio is that users can share the 3D augmented scenes they have created.Â  Noora is demoing this capability to </span></span><span><span>Dirk, and as you can see he cracks up when he sees theÂ  scene Noora has stored on her phone.Â  Dirk and I both recognize that this cute little dinosaur augmentation (close up above on bottom left) must have been created by <a href="http://www.metaio.com/company/" target="_blank">Peter Meier, CTO of Metaio</a>, during the Interoperability and Standards workshop earlier that day.Â  Metaio it seems were discussing standards while enjoying some 3D augmented back chat.<br />
</span></span></p>
<p><span><span> Both Dirk and I were active participants in the workshop too.Â  But little did we know that Peter Meier had introduced his little 3D dinosaur into our discussion while we diligently, and sometimes heatedly, debated the merits of XMPP, Wave Federation Protocol,Â  KML, ARML, VRML, X3D, andÂ  more!Â  The photo I took is on the bottom right of the four pics above. It was probably taken very shortly after Peter&#8217;s augmented Junaio scene.Â  Of course there is no little dinosaur in my pic ofÂ  Dirk Groten with <a href="http://twitter.com/JoeLudwig" target="_blank">Joe Ludwig</a> and <a href="http://twitter.com/markustripp" target="_blank">Markus Tripp of Mobilizy</a> who were discussing AR standards oblivious to Peter&#8217;s virtual pet in our midst.<br />
</span></span></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MarkusTrippPeterMeier.jpg"><img class="alignnone size-medium wp-image-4685" title="MarkusTrippPeterMeier" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MarkusTrippPeterMeier-300x199.jpg" alt="MarkusTrippPeterMeier" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Thereisawillingnesstostandardizepost.jpg"><img class="alignnone size-medium wp-image-4686" title="Thereisawillingnesstostandardizepost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Thereisawillingnesstostandardizepost-300x199.jpg" alt="Thereisawillingnesstostandardizepost" width="300" height="199" /></a><br />
</span></span></p>
<p><span><span>I must say I had noticed an impish look on Peter Meier&#8217;s face (see photo above on the left &#8211; Peter is wearing glasses and holding a phone).Â  And Markus Tripp, of MobilizyÂ  revealed a little bit of gaming of his own, when he let out that, in part, ARML is a provocation.Â  But Peter was clearly unfazed and enjoying himself.Â  Dirk, tasked to summarize our discussion, stalwartly maintained an optimistic but serious tone fitting for a standards discussion:Â  &#8220;There is a willingness to standardize&#8230;.,&#8221; he began (pic above on left &#8211; click to enlarge and read text). </span></span></p>
<p><span><span> But it was a little 3D dinosaur that, perhaps appropriately, had the last laugh. Fitting, as I am not sure whether anything anyone says about AR standards at the moment will hold up. But, as Ori commented in <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">his great post &#8211; an epilogue for ISMAR 2009,</a> the vibe was &#8220;Peace and Love&#8221; in AR Browser land (</span></span>although Chetan Damani of <a href="http://gamesalfresco.com/?s=%22acrossair%22" target="_blank">Across Air</a> was not in the standards discussion because he attended the UX/content? workshop instead)<span><span>. But as they say, &#8220;all&#8217;s fair in love and war.&#8221; And it is my feeling the games have barely begun! There are many players (<a href="http://www.youtube.com/watch?v=KI4lB00Ht9o&amp;feature=player_embedded#" target="_blank">virtual pets </a>included) waiting in the wings. I met some at ISMAR, and they are just itching to join the fray.<br />
</span></span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/coopetitionpost.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ARConsortiumpost2.jpg"><img class="alignnone size-medium wp-image-4701" title="ARConsortiumpost2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ARConsortiumpost2-300x188.jpg" alt="ARConsortiumpost2" width="300" height="188" /></a><img class="alignnone size-medium wp-image-4690" title="coopetitionpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/coopetitionpost-300x185.jpg" alt="coopetitionpost" width="300" height="185" /></p>
<p><span><span>Ori Inbar, <a href="http://ogmento.com/" target="_blank">Ogmento </a>and Robert Rice, <a href="http://www.neogence.com/#/home" target="_blank">Neogence Enterprises</a>, both founders of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a>, made great efforts to set our young industry off on the right foot -Â  in theÂ  spirit of <a href="http://en.wikipedia.org/wiki/Coopetition" target="_blank">coopetition </a>(</span></span>a <a title="Neologism" href="http://en.wikipedia.org/wiki/Neologism">neologism</a> coined to describe <a title="Co-operation" href="http://en.wikipedia.org/wiki/Co-operation">cooperative</a> <a title="Competition" href="http://en.wikipedia.org/wiki/Competition">competition)</a><span><span>. See </span></span><a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Curious Raven for </a><a href="http://curiousraven.squarespace.com/home/2009/10/23/ismar-09-observations-and-comments.html" target="_blank">Robert&#8217;s conference observations</a>, and <span><span><a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Ori&#8217;s post on Games Alfresco</a> for more about </span></span>Mobile Augmented Reality at ISMAR 2009.Â  The Mobile Augmented Reality Workshops were driven by an indomitable spokesperson for the new AR industry, <a href="http://www.perey.com/" target="_blank">Christine Perey</a>.Â  Christine not only helped motivate discussion on the issue of oxygen to the system, i.e. business value, but also she was a very generous connector at the conference.</p>
<p><span><span><br />
</span></span></p>
<h3>What&#8217;s Next From Augmented Reality&#8217;s Top Chefs?</h3>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.15.58-PM.png"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.12.35-PM.png"><img class="alignnone size-medium wp-image-4692" title="Screen shot 2009-10-24 at 7.12.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.12.35-PM-300x196.png" alt="Screen shot 2009-10-24 at 7.12.35 PM" width="300" height="196" /></a><br />
</span></span></p>
<p>As Ori pointed out, <a href="http://www.imdb.com/name/nm0218033/" target="_blank">Kent Demaine</a>, <a href="http://www.ooo-ii.com/" target="_blank">oooii</a> (pic above is from the oooii web site), Minority report VFX designer was hanging out at ISMAR 2009 and he came to the panel I was on: &#8220;Augmented Reality in Sports, Entertainment and Advertising.&#8221; We chatted afterwards about instrumented environments and how this is such a key to developing interesting augmented experiences. Also I mentioned how back in the day I was involved in some of the early development of motion control software. And it was great to hear Kent say they were still finding motion control cool at <a href="http://www.ooo-ii.com/" target="_blank">oooii</a>. As Ori notes, he is the &#8220;guy with the most enviable AR credentials in the world (the guy who designed VFX for minority report)<strong>,&#8221;</strong><strong> </strong>and <a href="http://www.ooo-ii.com/" target="_blank">oooii</a> is busy and hiring.</p>
<p>One of the highlights of the Arts, Media and Humanities track for me was meeting <a href="http://jarrellpair.com/" target="_blank">Jarrell Pair.</a> He really brought the best out in panelists with his well tuned questions. The recording of ISMAR was comprehensive and videos should be up next week. I will post the slides on Ugotrade of my presentation: &#8220;The Next Wave of AR: Shared Augmented Realities and Remix Culture.&#8221;</p>
<h3>&#8220;Mixed and Augmented Reality: &#8216;Scary and Wondrous&#8217;&#8221; &#8211; <a href="http://en.wikipedia.org/wiki/Vernor_Vinge" target="_blank">Vernor Vinge</a></h3>
<p><strong>&#8220;Imagine an environment where most physical objects know where they are, what they are, and can, (in principle) network with any other object. With this infrastructure, reality becomes its own database.Â  Multiple consensual virtual environments are possible, each oriented to the needs of its constituency.Â  If we also have open standards, then bottom-up social networks and even bottom up advertising become possible. Now imagine that in addition to sensors, many of these itsy-bitsy processors are equipped with effectors.Â  Then the physical world becomes much more like a software construct.Â  The possibilities are both scary and wondrous.&#8221;</strong> (<a href="http://en.wikipedia.org/wiki/Vernor_Vinge" target="_blank">Vernor Vinge</a> -Â  intro to ISMAR 2009)</p>
<p>Vernor Vinge&#8217;s short intro to ISMAR 2009 (which can be downloaded with the <a href="http://www.ismar09.org/" target="_blank">ISMAR 2009 schedule here)</a> captures the essence of the &#8220;Scary and Wondrous&#8221; dawn of the age of ubiquitous computing and mixed and augmented reality.Â  It is definitely worth a moment to download.Â  The future of augmented and mixed realities, as Vernor Vinge points out, is tied up in a &#8220;tension between centralized and distributed computing&#8221; that &#8220;will continue long into the future.&#8221; One ofÂ  my fascinations with Wave is that it offers a tantalizing opportunity to explore augmented reality in an open distributed architecture.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-12-at-2.40.39-PM.png"><img class="alignnone size-medium wp-image-4586" title="Screen shot 2009-10-12 at 2.40.39 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-12-at-2.40.39-PM-300x154.png" alt="Screen shot 2009-10-12 at 2.40.39 PM" width="300" height="154" /></a></p>
<p>At ISMAR, I talked with as many people as possible about the AR Wave project &#8211; <a href="../../2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/" target="_blank">see my post here for more about Wave enabled AR</a>.Â  Many people were very enthusiastic to join the AR wave and the only thing I really lacked was about 100 invites to hand out!</p>
<h3>&#8220;Everything, Everywhere &#8211; making visible the invisible&#8221;</h3>
<p>Some of the areas that I would have liked to see given more attention at ISMAR were sensor networks, data curation, and user experience. Not that these areas were entirely neglected with Pattie Maes, MIT as a keynote speaker, and Mark Billinghurst presenting on some fascinating work on social augmented experiences and user experience. I highly recommend catching up on these and other ISMAR presentations when the videos go up.</p>
<p><a href="http://www1.cs.columbia.edu/~swhite/" target="_blank"><img class="alignnone size-medium wp-image-4716" title="Screen shot 2009-10-25 at 12.28.25 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-12.28.25-PM-300x57.png" alt="Screen shot 2009-10-25 at 12.28.25 PM" width="300" height="57" /></a></p>
<p>And, I was very happy to meet and talk to <a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">Sean White</a> whose work at Columbia University is one of my inspirations (for more <a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">about Sean&#8217;s work see here</a> or click image above):</p>
<p><strong>&#8220;the confluence of powerful connected mobile devices, advances in computer vision and sensing, and techniques such as augmented reality (AR) enables exciting new opportunities for interacting with this hidden network of dynamic information and shifts the locus of interaction from the desktop computer to the world around us&#8221;</strong></p>
<p>And I had several very interesting conversationsÂ  at ISMAR about developing social augmented experiences that connect us to a physical world that is becoming &#8220;much more like a software construct&#8221; (Vernor Vinge).Â  Dirk Groten, CTO of Layar mentioned a few interesting projects Layar has up their sleeves, including somethingÂ  Layar may be cooking up with <a href="http://www.roomwareproject.org/" target="_blank">The RoomWare Project.</a></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-10.03.00-PM.png"><img class="alignnone size-medium wp-image-4697" title="Screen shot 2009-10-24 at 10.03.00 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-10.03.00-PM-300x231.png" alt="Screen shot 2009-10-24 at 10.03.00 PM" width="300" height="231" /></a><br />
</span></span><br />
The picture above is of RoomWare&#8217;s Social RFID Installation for Media Plaza in Utrecht (<a href="http://blog.roomwareproject.org/2008/10/06/social-rfid-installation-for-media-plaza/">read more here</a>).</p>
<h3>Demos Galore!</h3>
<p>In the demo rooms,<a rel="cc:attributionURL" href="http://augmentation.wordpress.com/2009/10/24/ismar-ismar-ismar-where-to-start/augmentation.wordpress.com"> Noah Zerkin</a> (pic below left) pretty much single handedly carried the AR flag for a growing community of augmented reality Makers and Hackers.Â  But his presence was much appreciated, and he tirelessly demoed <a href="http://zerkinglove.com/" target="_blank">The Zerkin Glove.</a> See <a href="http://augmentation.wordpress.com/2009/10/24/ismar-ismar-ismar-where-to-start/" target="_blank">the first of what may be several posts from Noah on ISMAR here</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/noah2post.jpg"><img class="alignnone size-medium wp-image-4700" title="noah2post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/noah2post-300x199.jpg" alt="noah2post" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixgogglespost.jpg"><img class="alignnone size-medium wp-image-4704" title="Tish&amp;Vuzixgogglespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixgogglespost-300x199.jpg" alt="Tish&amp;Vuzixgogglespost" width="300" height="199" /></a></p>
<p>And I got to try out the Vuzix goggles (picture above on right). This was my first experience playing an AR game that was smart about real world gravity. It&#8217;s &#8220;an <span>augmented reality</span> marble game that uses gravity as a <span>game controller</span>&#8221; &#8211; see <a href="http://gamesalfresco.com/2009/08/09/augmented-reality-has-gained-gravity/" target="_blank">Ori Inbar&#8217;s write up here</a>. It was a very compelling experience and I have to say I didn&#8217;t really notice the shortcomings of the Vuzix goggles while I was absorbed in the game. And I turned out to be quite good at the game too. It is intuitive unlike the kind of rule based games I never have time to learn properly. But what is so special about this project is the tools that it is built with are open, and available for all, and affordable (see this <a href="http://gamesalfresco.com/2009/08/09/augmented-reality-has-gained-gravity/" target="_blank">list on Games Alfresco</a>).</p>
<p>It was a great pleasure to meet <a href="http://www1.cs.columbia.edu/~feiner/" target="_blank">Prof. Steven Feiner</a> (picture on below the left) who heads Columbia University&#8217;s brilliant AR research team at <a href="http://graphics.cs.columbia.edu/top.html" target="_blank">The Columbia University Graphics and User Interfaces Lab.</a></p>
<p>Ori Inbar (pic below on right) also spent a lot of time in the demo room showing off Ogmento&#8217;s lovely AR learning game that delighted attendees, <a href="http://ogmento.com/"><strong>&#8220;Put a Spell: Learn to Spell with Augmented Reality.&#8221;</strong></a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixpost.jpg"><img class="alignnone size-medium wp-image-4703" title="TishVuzixpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixpost-199x300.jpg" alt="TishVuzixpost" width="199" height="300" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Ogmentopost.jpg"><img class="alignnone size-medium wp-image-4702" title="Ogmentopost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Ogmentopost-199x300.jpg" alt="Ogmentopost" width="199" height="300" /></a></p>
<p>For a round up ofÂ  what&#8217;s next for augmented reality head mounted displays check out, <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Games Alfresco here</a>, and Thomas Carpenter&#8217;s excellent review of the <a href="http://thomaskcarpenter.com/2009/10/21/ismar09-hmd-review/">head mounted displays.</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/GeorgandBlairpost.jpg"><img class="alignnone size-medium wp-image-4712" title="GeorgandBlairpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/GeorgandBlairpost-300x199.jpg" alt="GeorgandBlairpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/cypherpost.jpg"><img class="alignnone size-medium wp-image-4713" title="cypherpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/cypherpost-300x199.jpg" alt="cypherpost" width="300" height="199" /></a></p>
<p><strong>Ori Inbar on Games Alfresco asks is &#8220;Microsoft &#8211; the new big player to watch</strong>?&#8221; &#8220;<a href="http://www.robots.ox.ac.uk/%7Egk/" target="_blank">Georg Klein</a>, inventor of <a href="http://www.youtube.com/watch?v=pBI5HwitBX4" target="_blank">PTAM-on-an-iPhone</a> (and the smartest Computer Vision guy on the block)&#8221; has joined Microsoft to make Mobile AR.</p>
<p>The picture on the left above shows Georg trying out <a href="http://www.youtube.com/watch?v=Cix3Ws2sOsU&amp;feature=player_embedded" target="_blank">ARhrrr</a> with Blair MacIntyre.Â Â  And on the right Blair is demoing his marker card pack to Senior Vice President of Cypher Entertainment, David Elmekies.Â  Yes ISMAR was abuzz with demos. See<a href="http://compscigail.blogspot.com/2009/10/ismar09-few-demos.html" target="_blank"> </a><a href="http://compscigail.blogspot.com/2009/10/ismar09-few-demos.html" target="_blank">this post</a> from Gail Carmichael for more video demos.</p>
<h3>Next Year ISMAR 2010 in Korea!</h3>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ISMARBanquet.jpg"><img class="alignnone size-medium wp-image-4693" title="ISMARBanquet" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ISMARBanquet-300x199.jpg" alt="ISMARBanquet" width="300" height="199" /></a></span></span></p>
<p><span style="font-weight: normal;"><span style="font-weight: bold;"><span style="font-size: 0.800001em;"> </span></span></span>At the banquet, I managed to find a seat at a table with Sean White (at left in photo above with Christine Perey to his right) and the Columbia University team. The banquet culminated with the &#8220;Past and Future of ISMAR&#8221; Panel chaired valiantly by Jay Wright of Qualcomm. We were asked to offer our input for ISMAR 2010. I offered up an idea that I have been nurturing for a while now &#8211; to stage a &#8220;Green Tech AR Competition.&#8221; Perhaps, I suggested, we could <span id="zx-." title="Click to view full content">base the competition around a conference (ISMAR 2010 in Korea?) and set up a target rich, instrumented environment for the occasion. I think the Arduino open hardware community and AR developers have a synergy that is just waiting to be explored! And, if we add the innovators of data curation to the mix, e.g., Pachube, AMEE, and Path Intelligence&#8230;(Markus Tripp left ISMAR to speak on a <a href="http://www.web2summit.com/web2009" target="_blank">Web 2.0 Summit</a> panel, <a href="http://www.readwriteweb.com/archives/humans_as_sensors.php" target="_blank">&#8220;Humans as Sensors,&#8221;</a> which also included Path Intelligence, Deborah Estrin on <a href="http://research.cens.ucla.edu/people/estrin/" target="_blank">&#8220;participatory sensing,&#8221;</a> and the brilliant work of <a href="http://twitter.com/dianneisnor" target="_blank">Di-Ann Eisnor</a>, <a href="http://platial.com/" target="_blank">Platial</a>, on &#8220;Transactional Cartography&#8221;). Anyway a big Green tech AR competition could get people working together across the broad spread of AR terrain on some of the sticky problems of user experience. And, with a high level of support from Smart Phone companies, HMDs manufacturers and the chip makers we just might come up with some extraordinary magic.<br />
</span></p>
<p><span id="zx-." title="Click to view full content"> The devil of course will be in the details.Â  But a competition like this could not only motivate key players to come together in the spirit of coopetition but also be an opportunity to show the world the power of AR to make visible the invisible ecosystems that are so important to the health of our planet.<br />
</span></p>
<p>One of the notable presences at ISMAR 2009 was the Qualcomm team.Â Â  Jay Wright&#8217;s presentation (an exclusive for ISMAR) not only outlined AR for 2012, but Jay also talked about some &#8220;close to the metal&#8221; innovation that we will see from Qualcomm very, very soon!Â  I had some time in the press room with Jay and his team prompted by <a href="http://www.mobilemonday.nl/" target="_blank">MoMo&#8217;s </a>Yuri van Geest.Â  When I twittered about Qualcomm&#8217;s presentation at ISMAR, Yuri replied:<strong><br />
</strong></p>
<p><a href="http://twitter.com/vanGeest" target="_blank">vangeest</a> <a href="http://twitter.com/TishShute" target="_blank">&#8220;@tishshute</a>: good stuff, hopefully you will integrate the neat new solutions and ideas in your talk in November ;)&#8221;</p>
<p><strong> </strong>I will be presenting at <a href="http://www.mobilemonday.nl/" target="_blank">MoMo #13</a> on AR, open AR, future of AR and GeoWeb,Â  and hopefully will bring some good news from Qualcomm too.Â  Anyway Jay seemed to like the idea of a Green Tech AR Competition, even though I did stress that I thought it needed some serious sponsorship and BIG prizes.</p>
<p><strong><br />
</strong></p>
<h3>Where&#8217;s the beef? Tracking and Mapping at ISMAR 2009</h3>
<p>On the flight from NYC to Orlando and ISMAR &#8217;09 I dozed (I had been up late preparing my presentation) and I watched the Dew Tour Pro Skateboard competition and Top Chef on the Food Channel. In this particular episode of Top Chef, the aspiring chefs were all given a brown bag of ingredients by an already famous chef who then judged whether the contenders managed to make a delicious meal with their allotment which was notably lacking in key ingredients of haute cuisine.</p>
<p>This metaphor ofÂ  trying to cook up a great meal while perhaps missing the staples is apt for the current early stage of commercial augmented reality.Â  And when I arrived in Orlando, not only were the Dew Tour pro skateboarders staying at the same hotel as ISMAR, but ISMAR itself felt remarkably like an Augmented Reality Top Chef Coopetition.</p>
<p>Much of ISMAR was dedicated to the task ofÂ  providing the meat and potatoes of Augmented Reality, solutions to mobile tracking, mapping and registration, particularly in the Science and Technology track.</p>
<p>Industrial and Military Augmented reality solutions I found out, typically, solve the tracking problems by using fixed mounts which clearly wouldn&#8217;t translate well into the AR everywhere with everything mobile consumer culture expects.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DanielPustkapost.jpg"><img class="alignnone size-medium wp-image-4679" title="DanielPustkapost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DanielPustkapost-300x199.jpg" alt="DanielPustkapost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-2.41.56-PM.png"><img class="alignnone size-medium wp-image-4726" title="Screen shot 2009-10-25 at 2.41.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-2.41.56-PM-300x208.png" alt="Screen shot 2009-10-25 at 2.41.56 PM" width="300" height="208" /></a></p>
<p><em>In the picture on the left Fabian Doil stands by the VW engine that provided some of the outdoor targets for the ISMAR tracking competition.Â  On the right is a picture from the VW&#8217;s presentation on their research and development of AR.</em></p>
<p>I followed the tracking contest, organized by Daniel Pustka and Fabian Doil of Volkswagen, quite closely. And I learned a lot in the process. WhileÂ  it is clear there has been progress in AR mapping and tracking, we still have a ways to go.</p>
<p>But hanging around the Tracking Competition was a good way to find out the state of play of this crucial part of the AR dream.Â  For example,Â  a little tidbit I learned is that <a href="http://www.gris.informatik.tu-darmstadt.de/~mgoesele/" target="_blank">Michael Goesele </a>who has been reconstructing &#8220;high-quality geometry models from images collected from the internet (so called community photo collections, CPC)&#8221; is soon to be at the <a href="http://www.ini-graphics.net/ini-graphicsnet/members/fraunhofer-institut-fuer-graphische-datenverarbeitung-igd.html" target="_blank">Institut Graphische Datenverarbeitung</a> where top contenders in the tracking contest &#8211; Harald WuestÂ  and Folker Weintipper (in the foreground of the photo at the left and right respectively) are also to be found. [update Harold and Folker were the winning team <a href="http://docs.google.com/gview?a=v&amp;pid=gmail&amp;attid=0.1&amp;thid=1248dd2927becb21&amp;mt=application%2Fpdf&amp;url=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3D2%26ik%3De77cfddae9%26view%3Datt%26th%3D1248dd2927becb21%26attid%3D0.1%26disp%3Dattd%26zw&amp;sig=AHBy-hbcqUsaRNjbqpHO8vAF_vJqfDrMig" target="_blank">see here for details of scoring and results</a>!] Otto Korkalo and Tuomas Kantonen of VTT, Finland, Augmented Reality team are in the background. They have been working on the joint IBM, Nokia and VTT project that brings, <a href="http://www.marketwatch.com/story/researchers-from-ibm-nokia-and-vtt-bring-avatars-and-people-together-for-virtual-meetings-in-physical-spaces-2009-10-19" target="_blank">Avatars and People Together for Virtual Meetings in Physical Spaces.</a></p>
<p>The picture on the right is another team that was doing very well. If my notes serve me well (and please forgive me if they don&#8217;t. I came back with my card wallet overflowing!) the photo on the right shows Christian Waechter (on the left) and Peter Keitler (on the right) of the <a href="http://portal.mytum.de/welcome" target="_blank">Technische Universitat Munchen</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/trackingcompetitionpost.jpg"><img class="alignnone size-medium wp-image-4672" title="trackingcompetitionpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/trackingcompetitionpost-300x199.jpg" alt="trackingcompetitionpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition2post.jpg"><img class="alignnone size-medium wp-image-4681" title="Trackingcompetition2post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition2post-300x199.jpg" alt="Trackingcompetition2post" width="300" height="199" /></a></p>
<p>Germany is certainly leading the way in industrial AR. And I learned how small businesses like Metaio get to work with top research institutions and big companies like VW, thanks to very strong German funding program for AR and VR. The current iteration of a series of funding programs isÂ  called<a href="http://www.avilus.de/" target="_blank"> Avilus</a>.Â  AvilusÂ  is putting 42 million Euros into AR and VR this year alone (click on the slide below to see more about Avilus ).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-1.08.48-AM.png"><img title="Screen shot 2009-10-24 at 1.08.48 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-1.08.48-AM-300x212.png" alt="Screen shot 2009-10-24 at 1.08.48 AM" width="300" height="212" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-2.04.50-AM.png"><img class="alignnone size-medium wp-image-4673" title="Screen shot 2009-10-24 at 2.04.50 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-2.04.50-AM-300x202.png" alt="Screen shot 2009-10-24 at 2.04.50 AM" width="300" height="202" /></a></p>
<p>I wish we had the equivalent of Avilus here in the US. But there is no equivalent to Avilus for AR here, and no AR is being developed by the US car industry either it seems. But look at the slide above to get a taste of some of the cool stuff Metaio and other small AR and VR businesses do for VW through the Avilus project.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/VWtrackinggudrunpost.jpg"><img class="alignnone size-medium wp-image-4682" title="VWtrackinggudrunpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/VWtrackinggudrunpost-300x199.jpg" alt="VWtrackinggudrunpost" width="300" height="199" /></a></p>
<p>I also got to meet many people from one of the world&#8217;s most important AR hubs -Â  The Department of Informatics, <a href="http://portal.mytum.de/welcome" target="_blank">Technische Universitat Munchen</a>, including Prof. Gudren Klinker on the far right in pic above.Â  And from left to right, Fabian Doil (VW, co-organizer of contest), Sebastian Lieberknecht , Selim Ben Himane (Metaio), Tobias Eble (Metaio).Â  Prof. Klinker is the engine behind much of German innovation in AR.</p>
<p>Metaio was one of the few teams to rely mainly on markerless tracking which in this contest was very challenging because of the very different light conditions (see pics below) between the windowless interior and dazzling Florida sunshine outside (pic on the right shows targets under ideal lighting conditions). Many people in the US may be familiar with Metaio&#8217;s consumer applications, like Junaio, but thanks to Germany&#8217;s efforts to nurture augmented and virtual reality they are also respected software developers in industrial AR. And I suspect that Metaio will spearhead markerless tracking in consumer AR too.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition5post.jpg"><img class="alignnone size-medium wp-image-4740" title="Trackingcompetition5post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition5post-300x199.jpg" alt="Trackingcompetition5post" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-7.47.44-PM.png"><img class="alignnone size-medium wp-image-4745" title="Screen shot 2009-10-25 at 7.47.44 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-7.47.44-PM-300x229.png" alt="Screen shot 2009-10-25 at 7.47.44 PM" width="300" height="229" /></a></p>
<p>This post as usual has already expanded to something much longer than I originally intended &#8211; pretty typical for me! There is much I have not been able to cover including some of the interesting contributions by augmented reality artists at ISMAR &#8211; again I recommend the upcoming videos.</p>
<p>But I cannot end without a hat tip to, Oriel, Nate et al. who won the best student paper award for AR Sketch &#8211; again please <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">see Games Alfresco for more on this</a> (pic below from Games Alfresco). AR Sketch, Ori notes, is featured &#8220;in our <a href="http://gamesalfresco.com/2009/10/16/ismar-2009-sketch-and-shape-recognition-preview-from-ben-gurion-university/" target="_self">top post</a> and popular <a href="http://www.youtube.com/watch?v=M4qZ0GLO5_A" target="_blank">video</a>.&#8221; And</p>
<p><strong>&#8220;Their work is revolutionizing the AR world by avoiding the need to print markers â€“ or any images whatsoever.&#8221;</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-1.58.35-PM1.png"><img class="alignnone size-medium wp-image-4719" title="Screen shot 2009-10-25 at 1.58.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-1.58.35-PM1-300x223.png" alt="Screen shot 2009-10-25 at 1.58.35 PM" width="300" height="223" /></a><br />
</strong></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/feed/</wfw:commentRss>
		<slash:comments>9</slash:comments>
		</item>
		<item>
		<title>AR Wave: Layers and Channels of Social Augmented Experiences</title>
		<link>https://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/</link>
		<comments>https://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/#comments</comments>
		<pubDate>Tue, 13 Oct 2009 18:52:42 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[virtual communities]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Amphibious Architecture]]></category>
		<category><![CDATA[AR Blip]]></category>
		<category><![CDATA[AR Browser]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[augmentaion]]></category>
		<category><![CDATA[augmented reality search]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Channels and Social Augmented Realities]]></category>
		<category><![CDATA[citi sensing]]></category>
		<category><![CDATA[citizen sensing]]></category>
		<category><![CDATA[Clayton Lilly]]></category>
		<category><![CDATA[cybernetics vs ecology and human waste]]></category>
		<category><![CDATA[distributed]]></category>
		<category><![CDATA[eco mapping]]></category>
		<category><![CDATA[Gene Becker]]></category>
		<category><![CDATA[geoAR]]></category>
		<category><![CDATA[geospatial web]]></category>
		<category><![CDATA[geospatial web and augmented reality]]></category>
		<category><![CDATA[Goggle Wave Federation Protocol]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Google Wave as an AR enabler]]></category>
		<category><![CDATA[Google Wave enable augmented reality]]></category>
		<category><![CDATA[Google Wave Protocols]]></category>
		<category><![CDATA[green tech augmented reality]]></category>
		<category><![CDATA[immersive sight]]></category>
		<category><![CDATA[Jeremy Hight]]></category>
		<category><![CDATA[Joe Lamantia]]></category>
		<category><![CDATA[Layers]]></category>
		<category><![CDATA[layers and channels of augmented reality]]></category>
		<category><![CDATA[Life Clipper]]></category>
		<category><![CDATA[life streaming]]></category>
		<category><![CDATA[location based media]]></category>
		<category><![CDATA[location based services]]></category>
		<category><![CDATA[locative media]]></category>
		<category><![CDATA[locative narratives]]></category>
		<category><![CDATA[Mannahatta]]></category>
		<category><![CDATA[map based augmentation]]></category>
		<category><![CDATA[mapping]]></category>
		<category><![CDATA[modulated mapping]]></category>
		<category><![CDATA[modulated napping]]></category>
		<category><![CDATA[multi-user]]></category>
		<category><![CDATA[narrative archaeology]]></category>
		<category><![CDATA[Natural Fuse]]></category>
		<category><![CDATA[neogeography]]></category>
		<category><![CDATA[networked urbanism]]></category>
		<category><![CDATA[non euclidian geometry]]></category>
		<category><![CDATA[open augmented reality framework]]></category>
		<category><![CDATA[Seanseable Labs]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[shared augmented realities]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality experiences]]></category>
		<category><![CDATA[sound augmentation]]></category>
		<category><![CDATA[Thomas K. Carpenter]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Trash Track]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[virtual reality]]></category>
		<category><![CDATA[Wave as a platform for augmented reality]]></category>
		<category><![CDATA[Wave Blip]]></category>
		<category><![CDATA[Wave Bots]]></category>
		<category><![CDATA[Wave playback]]></category>
		<category><![CDATA[Wave playback feature]]></category>
		<category><![CDATA[Wave Robots]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4585</guid>
		<description><![CDATA[It is now nearly two weeks since the Google Wave preview launch and I am happy to say we have some AR Wave news. The diagram above shows Thomas Wrobelâ€™s basic concept for a distributed, multi-user, open augmented reality framework based on the Google Wave Federation Protocol and servers (click on the image to see [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://lostagain.nl/tempspace/PrototypeDiagram3_wave.html" target="_blank"><img class="alignnone size-medium wp-image-4586" title="Screen shot 2009-10-12 at 2.40.39 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-12-at-2.40.39-PM-300x154.png" alt="Screen shot 2009-10-12 at 2.40.39 PM" width="300" height="154" /></a></p>
<p>It is now nearly two weeks since the <a href="http://wave.google.com/" target="_blank">Google Wave </a>preview launch and I am happy to say we have some AR Wave news. The diagram above shows Thomas Wrobel&#8217;s basic concept for a distributed, multi-user, open augmented reality framework based on the <a href="http://www.waveprotocol.org/" target="_blank">Google Wave Federation Protocol</a> and servers (click on the image to see the dynamic annotated sketch <a href="http://lostagain.nl/tempspace/PrototypeDiagram3_wave.html" target="_blank">or here</a>).</p>
<p>Even in the short time we have had to explore Wave, some very exciting possibilities are becoming clear. Thomas puts some of the virtues of Wave as an AR enabler succinctly when he writes:</p>
<p><strong>&#8220;Wave allows the advantages of both real-time communication, as well as the advantages of persistent hosting of data. It is both like IRC, and like a Wiki. It allows anyone to create a Wave, and share it with anyone else. It allows Waves to be edited at the same time by many people, or used as a private reference for just one person.</strong></p>
<p><strong>These are all incredibly useful properties for any AR experience, more so Wave is open. Anyone can make a server or client for Wave. Better yet, these servers will exchange data with each other, providing a seamless world for the user&#8230;a single login will let you browse the whole world of public waves, regardless of who&#8217;s providing or hosting the data. Wave is also quite scalable and secure&#8230;data is only exchanged when necessary, and will stay local if no one else needs to view it.</strong></p>
<p><strong>Wave allows bots to run on it&#8230;allowing blips in a wave to be automatically updated, created or destroyed based on any criteria the coders choose. Wave even allows the playback of all edits since the wave was created.</strong></p>
<p><strong>For all these reasons and more, Wave makes a great platform for AR.&#8221;</strong></p>
<p>There will be much more <span>coming soon on Wave enabled AR because the Google Wave invites have begun to flow out to a wider community now. This week, many of our small ad-</span>hoc group looking at the development challenges and implications of Google Wave for AR actually got into Wave for the first time.</p>
<p>Many thanks to all the people who have contributed to this discussion so far including: Thomas Wrobel, Thomas K. Carpenter, Jeremy Hight, Joe Lamantia, Clayton Lilly, Gene Becker and many others.</p>
<p>We will be setting up some public AR Framework Development Waves this week.Â  If you have any trouble finding them, or adding yourself to it, please add Thomas and I to your contact list.Â  I am tishshute@googlewave.comÂ  Thomas is darkflame@googlewave.comÂ  The first two are currently called:<strong> </strong></p>
<p><strong><br />
AR Wave: Augmented Reality Wave Framework Development</strong> (developer forum)</p>
<p><strong>AR Wave: Augmented Reality Wave Development</strong> (for general discussion)</p>
<p>The discussion so far has been in two areas. On the one hand, it is gear-heady and focused on the <a href="http://www.waveprotocol.org/" target="_blank">Google Wave Federation Protocol</a>, code, development challenges, and interfacing to mobile, while on the other hand people have been looking at use cases and questions of user experience.</p>
<p>Distributed, &#8220;shared augmented realities,&#8221; or &#8220;social augmented experiences&#8221; &#8211; that not only allow mashups, &amp; multisource data flows, but dynamic overlays (not limited to 3d), created by users, linked to location/place/time, and distributed to other users who wish to engage with the experience by viewing and co-creating elements for their own goals and benefit &#8211; are something very new for us to think about.</p>
<p>As, Joe Lamantia, puts it, now:</p>
<p><strong>&#8220;there&#8217;s a feedback loop between which interactions are made easy by any given combo of device / hardware / software / connectivity, and the ways that people really work in real life (without any mediation / permeation by tech).&#8221;</strong></p>
<p>Joe Lamantia, whose term, <strong>&#8220;social augmented experiences&#8221;</strong> I borrow for this post title, has done some thinking about <strong>&#8220;concepts and models for understanding and contributing to shared augmented experiences, such as the social scales for interaction, and the challenges attendant to designing such interactions.&#8221; </strong>Check out <a href="http://www.joelamantia.com/" target="_blank">Joe Lamantia&#8217;s blog </a>for more on this later this week.</p>
<p>It is very helpful, as Joe points out, to shift the focusÂ  back and forth between the experience and the medium.</p>
<p>It is super exciting to have clear evidence that shared augmented realities are no longer merely possible, but highly probable and actually do-able now.</p>
<p>I shouldÂ  be absolutely clear about what Google Wave does to enable AR because obviously Wave plays no role in solving image recognition and tracking/registrations issues.Â  But, for example, Wave protocols and servers do provide a means to exchange, edit, and read data, and that enables distributed, social augmented realities.</p>
<p>Thomas explains how the newly named &#8220;AR Blip&#8221; works as:</p>
<p><strong>&#8220;An AR Blip is simply a Blip in wave containing AR data. Typically this would be the positional and url data telling an AR browser to position a 3d object at a location in space.</strong></p>
<p><strong>In more generic terms, an AR Blip allows data of various forms (meshes,text,sound) to be given a real-world position.&#8221;</strong></p>
<p>I have mentioned in other posts (<a href="http://www.ugotrade.com/2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/" target="_blank">here</a> and <a href="http://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/" target="_blank">here</a>) that Wave can be used for AR as precise or as loose as the current generation devices can handle. And as the hardware and software for the kind of AR that can put media out in the world to truly immerse you in a mixed space, the frameworkÂ  shouldÂ  be able to handle this too.</p>
<p>(a note on the Wave playback feature &#8211; this opens up a whole new world of possibilities.Â  Check out <a href="http://snarkmarket.com/2009/3605" target="_blank">this post</a> on some of the implications of playback for writing!)</p>
<p>The use cases we have been coming up with are too numerous to go into in detail this post<span>.Â  The open nature of an AR framework/Wave standard will lead to many new applications we have barely begun to imagine.Â  As Thomas points out, different client software can be made for browsing, potentially allowing for various specialist browsers, as well as more generic ones for typicalÂ  use. T</span>he multitudes of different kinds of data in/output that could be integrated in an open AR framework as it evolves are mind boggling.</p>
<p>But, for now, someÂ  obvious use cases do come to mind:<br />
eg.</p>
<p>- Historical environmental overlays showing how a city used to be/and how this vision may be constructed differently by different communities</p>
<p>- Proposed building work showing future changes to a structure/and the negotiations of this future (both the public and professionals could submit their own comments to the plans in context), seeing pipes, cables and other invisible elements that can help builders and engineers collaborate and do their work.</p>
<p>- Skinning the world with interactive fantasies</p>
<p>I asked Thomas to help people understand how Wave enables new interactions to data by explaining how Wave could enable city sensing and citizen sensing projects (e.g.<a href="http://tinyurl.com/y97d5zr" target="_blank"> this one being pioneered by Griswold</a>):</p>
<p><strong><strong>&#8220;Sensors, both mobile and static could contribute environmental data into city overlays;</strong></strong></p>
<div><strong><strong>&#8212;temperature, windspeed, air quality (amounts of certain particles) water quality, amount of sunlight, CO2 emissions could all be fed into different waves. The AR Wave Framework makes it easy to see any combination of these at the same time.&#8221;</strong></strong></div>
<div><strong><strong><br />
</strong></strong></div>
<p><strong><strong> </strong></strong>Having these invisible aspects of the world made visible would create ways to improve sustainability, social equity, urban management, energy efficiency, public health, and allow communities to understand and become active participants in the ecosystems and infrastructure of their neighborhoods.</p>
<p>The key is reflecting thisÂ  kind of data back to people &#8220;making it not back story but fore story,&#8221; right where we are, right where it happens, as well as having it available for analysis.</p>
<p>As well as creating new opportunities to interact/respond to/and enhance data, making visible the invisible as <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko&#8217;s</a> work on <a href="http://www.amphibiousarchitecture.net/" target="_blank">Amphibious Architecture</a> and <a href="http://www.haque.co.uk/" target="_blank">Usman Haque&#8217;s</a> project <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a> show, can also create new connections/understandings between humans and the non-humans that share our world, e.g. fish, plants, waterways.</p>
<p>At a more prosaic levelÂ  potential buyers of property could see more clearly what they are buying, city planners could see better what needs to be worked on, and environmental researchers could see more clearly the impact people are having on an area.</p>
<p>Also Wave can provide some of the framework necessary to begin to address tricky problems of privacy. Sensitive data can be stored on private waves, e.g. medical data for doctors and researchers, but the analysis of the data could still be of benefit to all, e.g., if it tied disease occurrences to locations and relationships between the environmental data and health were&#8230;quite literally&#8230;made visible.</p>
<p><strong>&#8220;The publication of energy consumption and making it visible as overlays, could help influence the public into supporting more energy-efficient companies and businesses. It could also help citizens to try to keep their own energy usage down, to try to keep their street in &#8220;the green.&#8221;</strong></p>
<p>Thomas notes:</p>
<p><strong>&#8220;With all of the above, it becomes fairly trivial to write persistent Wave-bots that automatically send notice when certain criteria are met (pollutants over a certain level, for example). On publicly readable waves, anyone can use the data in their local computers, process it, and contribute results back on a new wave. Alternatively, persistent remote servers could run Cron jobs, or other automated processing, using services such as App Engine to run wave robots.</strong></p>
<p><strong>All these possibilities become &#8220;free&#8221; when using Wave as a platform for geographically tied data.&#8221;</strong></p>
<p>But of course this is just the beginning!</p>
<p><em>Recently, I talked at length with Jeremy Hight who has been thinking about, designing and creating shared augmented realities, that anticipate the kind of dynamic, real time, large scale architecture we now have available through Wave,Â  for quite some time now.Â Â  This is exciting stuff. </em></p>
<p><em><br />
</em></p>
<h3><strong>Modulated Mapping:</strong> Talking with Jeremy Hight about Layers, Channels andÂ  Social Augmented Experiences</h3>
<p><strong><strong> </strong></strong></p>
<p><strong><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping5.jpg"><img class="alignnone size-medium wp-image-4611" title="modulatedmapping5" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping5-230x300.jpg" alt="modulatedmapping5" width="230" height="300" /></a><br />
</strong></strong></p>
<p><strong><strong><em><span>image from Volume Magazine (Hight/Wehby)</span></em></strong></strong></p>
<p><strong><strong>Tish Shute:</strong></strong> I know you have been involved in locative media from its early days. Perhaps we can talk about how AR continues the locative media journey?</p>
<p><a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank">Blair MacIntyre</a> gave me this distinction, recently:<em> &#8220;AR is about systems that put media out in the world, and immerse you in a mixed space. Â Even the current &#8220;not really registered&#8221; mobile phone AR systems are still &#8220;sort of&#8221; AR (e.g., Layar, etc).</em></p>
<p><em>Locative media/ubicomp/etc are very different, in that they tend to display media on a device (phone screen) that is relevant to your context, but does not attempt to merge it with the world.<br />
The difference is significant, and making it clear helps people think about what they do and what they want to do, with their work. The locative media space though points toward future AR systems (when the technology catches up!).&#8221;</em></p>
<p><strong><strong>Jeremy Hight: The need is to finish the arc that locative media and early AR have started and to now truly return to the map itself, but as an internet of data, interactivity, channels of data , end user options like analog machines once were but in high end tools, a smart AI-ish ability for it to cull data for the user, and to allow social networking to be in real world places on the map both in building augmentation and in using and appreciating it..not hacks..which have their place&#8230;but a rhizome, a branched system with shared root,end user adjustable and variable..this is the key.</strong></strong></p>
<p><strong><strong>This takes AR and mapping and makes a possible world of channels in space and this eventually can be a kind of net we see in our field of vision with a selected percentage of visual field and placement so a geo-spatial net, a local to world wide fusion of lm into a tool and educational tool</strong></strong></p>
<p><strong><strong><span>VR [virtual reality] has greatly advanced, but in nodes as it has limitations&#8230;LM [locative media] is the same&#8230;AR [augmented reality] is the way..</span></strong><strong> it now has locative elements and aspects of VR integrated into its functionality and nodes&#8230;it is the best option with all of these elements, greater hybridity and data level potential as well as end user and community sourcing potential</strong></strong></p>
<p><strong><strong>I wrote an essay for Archis&#8217; Volume, the architecture magazine on a near future sense of some of this&#8230;.a visual net on the lens like ar but with smart objects and social networking and dissent.</strong></strong></p>
<p><strong><strong>I also wrote of these things for immersive graphic design, spatially aware museumÂ  augmentation, education through ar and lm and nod to the base interface of eye to cerebral cortex in layered and malleable augmentation in my essay <a href="http://www.neme.org/main/645/immersive-sight" target="_blank">&#8220;Immersive Sight&#8221;</a> a few years back</strong></strong></p>
<div id="gqg9" style="text-align: left;"><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_3dj7g8zf7_b.jpg"><img class="alignnone size-medium wp-image-4601" title="dgznj3hp_3dj7g8zf7_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_3dj7g8zf7_b-300x225.jpg" alt="dgznj3hp_3dj7g8zf7_b" width="300" height="225" /></a></strong></div>
<p><strong><strong>image [above] is simple illustration of a possible example on a screen or in front of eye where in a mondrian show..the graphic design of information actually builds as one moves</strong></strong></p>
<p><strong><strong>(key is calibrated spatial intervals and related layers of further augmentation which is logical due to location and proximity)</strong></strong></p>
<p><strong><strong>from immersive sight on immersive graphic design:</strong> <em>&#8220;The design can work with this in a way that creates an interactive supplemental set of information that is malleable, shifts based on location, builds and peels away as one moves closer to a work and plays with the forms of the works and the elements of the space itself. The sequence can contain many different elements and their interplay (both in the field of vision and in terms of context and layers of information). This is the model of sections of augmentation turning on and off at key points as individual spatial and concepts moments and nodes.</em></strong></p>
<p><strong><em>Another interesting possibility is that individual points of augmentation don&#8217;t turn off, but instead are designed to build as one moves in a direction toward a specific part of the exhibit. The design can work in a sequence both content wise and visually in terms of a delay powered compositional development and style in which each discrete layer of text and image does not fade out, but builds on each other into a final composition. This can form paintings similar to Mondrian perhaps if it is a show of similar works of that era or it can form something much more metaphorical and open interpretation of the space and content but utilizing a sense of emergence spatially in terms of the composition (pieces laid bare until final approach for effect). </em></strong></p>
<p><strong><em>Each section will be well designed, but they build in layers as one moves until finally forming the final composition both visually and in terms of scope of information or building immediacy. The effect can be akin to taking a painting and slicing it into onion skin layers laid out in the air at intervals, each the same dimensions, but only one section compositionally of the greater whole. This has many semiotic applications beyond its potential aesthetically and as spatialized information possessing a sense of inter-relationship as one moves.</em>&#8220;</strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>One of the things I found very inspiring when I read your papers was that your ideas are not all dependent on a model of AR that would necessarily require goggles, back packs and lots of CPU/GPU &#8211; not that that wouldn&#8217;t be nice, but that even using &#8220;magic lens&#8221; AR of the kind smart phones have enabled in an open distributed framework would open up a lot of new possibilities for what you call modulated mapping wouldn&#8217;t it? What kind of social augmented realities might be enabled by a distributed infrastructure like this [AR Wave]?</p>
<p><strong><strong>Jeremy Hight: right&#8230;.I see that as wayyy down the road&#8230;most important is the one you talk about as it is more immediate and thus more essential and needed. Eventually the goggles will be like a contact lens and a deep immersive ar version ofÂ  this will come, that to me is certain, but a ways down the road.Â  An incredible amount is possible now, and this is a more pragmatic move as opposed to the more theoretical of what is a few steps from here. Thus it is more important and essential now. Tools like Google Wave are taking what even 2 years ago was more theoretical discussions of what may be and instead introducing key elements to a more immediate, powerful, flexible level of augmentation. What have been hacks and isolated elements are to be integrated and social networking, task completion, shared tools and graphics building and geo-location.</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>I think some people question what augmented reality has to bring to the continuum of location based experiences that other forms of interface/mapping do not?</p>
<p><strong><strong><span>Jeremy Hight: right&#8230;.and the schism between its commercial </span></strong><strong>flat self and tests with physics etc and in between &#8230;there are a lot of unfortunate assumptions it seems as to where ar and lm cross and how ar can be many things beyond deep immersion or the opposite pole of a hockey puck having a magic purple line etc&#8230;.like lm is seen as either car directions or situationist experiments with deep data&#8230;..the progression to me is deeply organic&#8230;.and now augmentation can be more malleable, variable and end user controlled.</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>Yes, it is a really exciting time for AR. Historically AR research has gone after the hard problems of image recognition, tracking and registration because we have not had available to us dynamic, real time, large scale architectures like Wave (until now!), so less work has been done on exploring the possibilities for distributed AR fully integrated with the internet and WWW hasn&#8217;t it?</p>
<p>A distributed augmented reality framework such as we have envisaged on Wave would allow people to see many layers from many different people at the same time. And this kind of model has been part of your thinking and fundamental to your work for a while, hasn&#8217;t it? But it is a very new idea to most people to think about collaboratively editing layers on the world, and to be able to view augmented space through channels and networked communities? Could you explain some of the ways you have explored these ideas and how they could be explored further now to create meaningful experiences for people?</p>
<p><strong><strong><span>Jeremy Hight: right..exactly&#8230;modulated mapping to me can be an amazing tool for students&#8230;back end searching data visualizations and augmentations based on their needs&#8230;while they do something else on their computer or iphone&#8230;that can be amazing..and not deep </span></strong><strong>immersive..The map can be active, malleable, open source fed, and even, in a sense, intelligent and able to adapt. The possibility also exists for this map to have a function that based on key words will search databases on-line to find maps, animations, histories and stories etc to place within it for your study and engagement. The map is thus a platform and yet is active. Community is possible as people can communicate graphically in works placed on the map and in building mode in the tool. All the tropes of locative media are to be in a </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> system of channels of augmentation and a spatial net. The software by design will allow development on the map and communication like programs such as second life but in </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> itself.</strong></strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modultedmapping1.jpg"><img class="alignnone size-medium wp-image-4607" title="interactive 3d map copy" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modultedmapping1-246x300.jpg" alt="interactive 3d map copy" width="246" height="300" /></a></strong></p>
<p><strong><strong><em><strong><span>image from Parsons Journal of Information Mapping Volume 2 (Hight/Wehby)</span></strong></em></strong></strong></p>
<p><strong><strong><span>I wrote an essay a few years ago for the Sarai reader questioning the traditional map and its semiotics and need to reconsider &#8211; then did work looking into it and what those dynamics were and they got into 2 group shows in museums in Russia&#8230;so it actually was my arc toward modulated mapping&#8230;an interesting way to it! But yes the map itself..this is a huge area of potential and non screen based alone navigation etc. I see now that my 2 dozen or so essays in lm,ar, interface design and augmentation have all also been leading in this direction for about 10 years now</span></strong></strong></p>
<p><strong><strong>Tish Shute: </strong>I love immersive visualization but can we &#8220;return to the map &#8211; the internet of data&#8221; as you mentioned earlier and produce interesting augmentation experiences that go beyond locative media&#8217;s device display mode without having the goggles, for example, through the magic lens of our smart phones?</strong></p>
<p><strong><strong>Jeremy Hight: yes, absolutely.Â  the map in the older paradigm is an artifice born often of war and border dispute and not of the earth itself and its processes&#8230;the new mapping like google maps is malleable, can be open source, can read spaces and can be layers of info in the related space not plucked from it as in the past..this is amazing. the old map also was born of false semiotics/semantics like &#8220;discovery of new lands&#8221; or &#8221; pioneer&#8221;Â  while the places were there already and names often were of empire&#8230;now this is no longer the case</strong></strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping2.jpg"><img class="alignnone size-medium wp-image-4608" title="jeremy map small2 copy" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping2-300x233.jpg" alt="jeremy map small2 copy" width="300" height="233" /></a></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>So geoAR is a better way to express a new social relationship to mapping? And how does this fit into the evolving arc of locative media that evolves into augmented reality?</p>
<p><strong><strong>Jeremy Hight:&#8230;early lm was mostly geocaching and drawing with gps..it took new paradigms to invigorate the fieldÂ  a lot of folks focus on tools and what already is, cross pollination can ground ideas that are more radical&#8230;a metaphor in a sense to place what can be in a familiar context.</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>one of the great disappointments in VR has been its isolation from networked computing and also, up to now, augmented reality &#8211; to achieve an immersive experience withÂ  tight registration of media/graphics have to create separate system isolated from the internet and power of the web.</p>
<p><strong><strong>Jeremy Hight: yes&#8230;.this will change. vr is to me an island but ar takes a part of it and shifts the paradigm and new things open this way. Do you know the project <a href="http://www.lifeclipper.net/EN/process.html" target="_blank">&#8220;life clipper&#8221;</a>? friends of mine..doing interesting things..they are a clear bridge between lm and ar&#8230;.and from vr</strong></strong></p>
<p><strong><strong>in ar augmentation and what is being augmented become fused or in collision or in complex interactions as a means to a larger contextualization and exploration of what is being augmented..this is true in immersive or non ar&#8230;.huge potential</strong></strong></p>
<p><strong><strong>vr is a space, now can be surgery which is amazing. but not layered interaction, thus an island and graphic iconography on a location can use symbolic icons which opens up even more layers (graphic designer/information designer in me talking there I suppose..)</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>Yes !Â  talk to me more about layers and channels I think this is one of the most interesting questions for meÂ  in augmented reality at the moment &#8211; what can we do with layers and channels and the new possibilities on connections between people and environments that these can create?</p>
<p>The ability for anyone to post something is critical to the distributed idea but one of the reasons I am so excited by Google Wave is I am fascinated by the playback function. How do you think this will enable new forms of collaborative locative narratives (<a href="http://snarkmarket.com/2009/3605" target="_blank">nice post on Wave playback here </a>).</p>
<p><strong><strong>Jeremy Hight: We are in an age of cartographic awareness unseen in hundreds of years. When was the last time that new </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> tools were sold in chain stores and installed in most vehicles? When was the last time that also the augmentation of maps was done by millions (Google map hacks, etc)? The ubiquitous gps maps run in automobiles while people post pictures and graphic pins to denote specific places on on-line maps.</strong></strong></p>
<p><strong><strong>The need is for a tool that combines all of these new elements into an open source, intuitive layered and rhizomatic map that is porous (like pumice, organic in form yet with &#8220;breathing room&#8221;), ventilated (i.e. adjustable, a flow in and out), and open (open source, open access, open spatialized dialog).</strong></strong></p>
<p><strong><strong><span> I wrote of this in my essay &#8220;Revising the Map: Modulated Mapping and the Spatial Interface .&#8221;(</span></strong><span> </span><a id="h0qr" title="http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf )" href="http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf%20%29"><span>http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf )</span></a></strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping3.jpg"><img class="alignnone size-medium wp-image-4609" title="jeremy map small2 copy" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/modulatedmapping3-300x206.jpg" alt="jeremy map small2 copy" width="300" height="206" /></a></strong></p>
<p><strong><em><strong><span>image from Parsons Journal of Information Mapping (Hight/Wehby)</span></strong></em></strong></p>
<p><strong><strong>Tish Shute:</strong></strong> One mapping project I really like is <a href="http://themannahattaproject.org/" target="_blank">Mannahatta</a>.Â  How could distributed AR contribute to a project like <a href="http://themannahattaproject.org/" target="_blank">Mannahatta</a>?</p>
<p><strong><strong>Jeremy Hight: that is a good example..imagine taking manhattan and having channels of options to overlay, that being an excellent option, and imagine being able to even run a few at once with delineating icons..you can augment a space with history, data, erasure, narrative, scientific analysis, time line of architecture, infrastructure, archaeological record etc&#8230;.endless possibilities, and this agitates place and place on a map into an active field of information with end user control&#8230;and open options for new layers</strong></strong></p>
<p><strong><strong>Tish Shute: </strong></strong>and do you think we could do interesting things with AR on a project like Mannahatta even with the current mediating devices we have available &#8211; i.e. our smart phones as obviously the rich pc experience of Mannahatta has built for its web interface would not be available as AR at this point?</p>
<p><strong><strong>Jeremy Hight: yes&#8230;.k.i.s.s right?Â Â  these projects do not have to only be immersive and graphic intensive&#8230;&#8230;take how people upload photos onto google maps&#8230;.just make that on a menu of options, there are some pretty cool hacks already..<br />
&#8230;options is key, a space can have a community as well, building on it in software, and others navigating it, i see it near future and down the road..always have with ar really</strong></strong></p>
<p><strong><strong><a href="../wp-content/uploads/2009/10/locativenarratives1.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/locativenarratives1.jpg"><img class="alignnone size-medium wp-image-4596" title="locativenarratives1" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/locativenarratives1-230x300.jpg" alt="locativenarratives1" width="230" height="300" /></a><br />
</strong></strong></p>
<p><strong><em><strong><span>image from Volume Magazine (Hight/Wehby)</span></strong></em></strong></p>
<p><strong><strong>Jeremy Hight: and yes, a lot of people focus on ar as its limitations and processing power needs as a major road block</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>so do you see AR on smart phones adding any value to a project like Mannahatta?</p>
<p><strong><strong>Jeremy Hight: yes&#8230;that it can be integrated into other similar works and even disparate but cloud linked ones&#8230;so a place can be &#8220;read&#8221; in diff ways on the iphone&#8230;.beyond its map location, and more can be possible if you are there&#8230;others away, so it becomes channels of augmentation</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>AR like locative media puts who you are, where you are, what you are doing, what is around you center stage in online experience but it also &#8220;puts media out in the world&#8221; &#8211; people I think understand this well as a single user experience but we are only just beginning to think about how this will manifest as a social experience &#8211; could explain more about modulated mapping as an experience of social augmentation?</p>
<p><strong><strong style="background-color: #99ff99; color: black;"><span>Jeremy H</span>ight: Modulated</strong> <strong style="background-color: #ff9999; color: black;">Mapping </strong><strong>is a tool that will allow channels to be run along the map itself. This will allow one to view different icons and augmentations both as systems on the map and in deeper layers of information (photos, videos, animations,Â  visualizations, etc) that can be turned on and off as desired. The different layers of icons and data may be history, dissent, artworks, spatialized narratives, and annotations developed that are communally based on shared interests, placed spatially and far beyond. The use of chat functionality in text or audio will be open in building mode and in </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> navigation/usage as desired. This also allows a community to develop or augment in the spaces on the earth. These nodes can be larger and open or small and set by groups in their channel. The end result is an open source sense of </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> that will also have a needed sense of user control as one can select which layers of augmentation they wish to see and interact with at any time. It also will incorporate all the functionality of locative media in </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> software and </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong>. In building mode and in map mode, icons will be coded to represent within channels (remember that the person using it has selected channels of augmentation from many based on their current interests and needs). 
Icons will be coded as active to show work in progress in cities and the globe to both invite participation and to further agitate the map from the sense of the static as action is visible even with its icons as people are working and community is formed in common interest/need .</strong></strong></p>
<p><strong><strong>locative media got a buzz for &#8220;reading&#8221; places&#8230;when I helped create locative narrative that was what blew me away back in 2001&#8230;that we could give places a voice by placing data from research and icons on a map&#8230;&#8230;this meant lost history or augmentation was possible as kind of voices of a place and its layers&#8230;&#8230;.I called it &#8220;narrative archaeology.&#8221; We now have tools that can push these ideas and concepts farther..much farther&#8230;and with a range beyond what was before, and then the map was just a tool&#8230;.but now we are returning to the map itself&#8230;..and this as place as much as marker..this is where ar takes the ball to use a bad metaphor</strong></strong></p>
<p><strong><strong>also that project could only work if you came to our spot of a 4 block augmentation and with us there to lend you our gear&#8230;we are far beyond that now but it had its place</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>How do you see &#8220;in context&#8221; AR and something we might call &#8220;context aware&#8221; cloud computing models interacting?</p>
<p><strong><strong>Jeremy Hight: sure&#8230;and I must add that I have issues with cloud computing as much as it is a good idea..</strong>.</strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>because of loss of autonomy?</p>
<p><strong><strong>Jeremy Hight: tivo is simply a hard drive&#8230;but it keyword reads and gives suggestions..that is the cro magnon link to what can be</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>The nice thing about Wave is because of the Federation model, the cloud model and local store ur own data models should work together.<strong><strong><span> </span></strong></strong></p>
<p><strong><strong><span>Jeremy Hight: yes..that is better&#8230;..loss of autonomy also opens up the arbitrary which is the flaw of search engines as we know it&#8230;even Bing fails to me in that sense</span></strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>how do you mean, could you explain?</p>
<p><span> </span><strong><strong><span>Jeremy Hight: spidersÂ  cull from wordsÂ  but cull like trawlers at sea â€¦. tested Bing with very specific requests.. it spat out the same mass of mostly off topic resultsâ€¦.</span><br />
<span> I wonder if there is a way to cull from key words and topics from a userâ€¦not O</span>rwellian back end of courseâ€¦but from their preferences, their searches etc..</strong></strong></p>
<p><strong><strong>Tish Shute:</strong> </strong>did you see the discussion on search in the AR Framework doc? AR search will be a massively important thing that will take a lot of intelligence and all sorts of algorithm development won&#8217;t it?</p>
<p><strong><strong>Jeremy Hight: It also has one area of key functionality that moves into more intuitive software. Upon continued usage, the </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> software will &#8220;learn&#8221; and search based on key words used and spheres of interest the user is </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> or observing as mapped and will integrate deeper data and types of animations, etc. into the map or will have them waiting to be integrated upon user approval as desired. Over time the level of sophistication of additions and of search intuition will increase dramatically. The search can also, if the user wishes, run in the back end while working in the </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> program, or in off time as selected while doing other tasks. It also can never be used if one is not interested. One of the key elements of this </strong><strong style="color: black; background-color: #ff9999;">mapping</strong><strong> is that it is not composed of a closed set or needs user hacks to augment, but instead is to evolve and deepen by user controls and desired as designed. Pre-existing data, visualizations and augmentations can be integrated with relative ease.</strong></strong></p>
<p><strong><strong>Tish Shute: </strong></strong>One of the things that Joe Lamantia points out about social augmented experiences is that they will operate across a number of different scales &#8211; conversation &gt; product design &amp; build team &gt; neighborhood / town fixing potholes &gt; global community for causes. How do designs for channels and layers change across these different social scales?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> quote myself &#8230;&#8221;The &#8220;frontier&#8221; is often defined as the space just ahead of the known edge and limit, and where it may be pushed out deeper into the previously unknown. The frontier in the world of ideas is not the warm comfort of what has been long assimilated; and the frontier in the landscape is not of maps, but of places beyond and before them&#8221;</strong></strong></p>
<p><strong><strong>The border along what has been claimed is not only that of maps &#8211; it is of concepts, functions, inventions and related emergent industries. Ideas and innovations are like the cloud shape that briefly forms around a jet breaking the sound barrier, tangible yet not fully mapped into measure. It is when things are nailed down into specific entities, calibrated and assessed, that the dangers may inflict themselves &#8211; greed, competition, imitation, anger, jealously, a provincial sense of ownership either possessed or demanded&#8221;. (from essay in Sarai reader). Otherwise channels and augmentation do not have to be socio-economically stratifying or defined by them. We built 34n for almost nothing on older tools.</strong></strong></p>
<div id="yqjj" style="text-align: left;"><strong><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_1g3svj8fq_b.jpg"><img class="alignnone size-medium wp-image-4599" title="dgznj3hp_1g3svj8fq_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_1g3svj8fq_b-300x225.jpg" alt="dgznj3hp_1g3svj8fq_b" width="300" height="225" /></a></strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_1g3svj8fq_b.jpg"><span> </span></a></strong></div>
<p><strong><em><strong><span>image from 34north 118west (Spellman/Hight/Knowlton)</span></strong></em></strong></p>
<p><strong><strong>The ar that is not deep immersion can be more readily available and channels can be what end users need like the diversity of chat rooms or range of Facebook users among us.</strong></strong></p>
<p><strong><strong>I had two moments yesterday that totally fit what we talked about. I went to west hollywood book fair and traditional directions off of mapping for driving directions were wrong and we got lost&#8230;our friend could only get a wireless signal to map on itouch and we had to roam neighborhoods then we called a friend who google mapped it and we found we were a block away&#8230;.so a fast geomapping overlay with an icon for the book fair on some optional grid service or community would have made it immediate. Then at the book fair talked to a small press publisher who is trying to map works about los angeles by los angeles authors on a map..she was stunned when I told her it could be a kind of google map feature option</strong></strong></p>
<p><strong><strong>it also has great potential to publish and place writing and art in places..both for commentary and access. imagine reading joyce in chapters where it was written about and then another similar experience but with writers who published on a service into their city.</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>did you see the discussion on search in the AR Framework doc? AR search will be a massively important thing that will take a lot of intelligence and all sorts of algorithm development won&#8217;t it?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> well my example earlier could have been communal in a way too..a tribe sort of augmentation channeling &#8230;.like subscribing to list servs back in the day but of augmentation communities/channels, and for folks to build and use in shared live form, coordinating too</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong></strong> </strong>one good thing though about building an open AR Framework is that as bandwidth/CPU/hardware gets better shared high def immersive experiences could be supported by the same framework..</p>
<p><strong><strong>Jeremy Hight: excellent</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>were you thinking of the image recognition and tracking with this example?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> yeah&#8230;.like scanning across a multi channeled google map augmentation with diff icons and their connected data&#8230;and poss social networking and fle sharing even in that mode&#8230;and rastering etc&#8230;.could be cool with google wave </strong><strong><span>- on the map..then zooming in a la powers of ten..(eames film).</span></strong></strong></p>
<p><strong><strong>-</strong><strong><span>I have pictured variations of this for a few years now in my head like the example of my friends and I yesterday&#8230;we could have correlated a destination by icons in diff channels..one being lit events within lit channel in l.a map&#8230;maybe things streaming on it too&#8230;remote info and video etc&#8230; that would be awesome</span></strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong></strong></strong> So many of the ideas in your paper on modulated mapping (see <a href="http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf" target="_blank">here</a>) are brilliant use cases for shared augmented realities. Perhaps you could talk more about your ideas about locative narrative because this is something I think is at the core of the kinds of experiences that a distributed AR Framework would make possible?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> on the project &#8220;34 north 118 west&#8221; we mapped out a 4 block area for augmentation of sound files triggered by latitude and longitude on the gps grid and map and the map on the screen had pink rectangles that were the &#8220;hot spots&#8221; where the augmentation had been placed.</strong></strong></p>
<div id="nwc6" style="text-align: left;"><strong><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_0gg994bf9_b.jpg"><img class="alignnone size-medium wp-image-4600" title="dgznj3hp_0gg994bf9_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_0gg994bf9_b-300x225.jpg" alt="dgznj3hp_0gg994bf9_b" width="300" height="225" /></a></strong></strong></div>
<p><strong><em><strong><span>image of interactive map with map based augmentation connected to audio augmentation on site for 34north 118west (Spellman/Hight/Knowlton)</span></strong></em></strong></p>
<p><strong><strong>We researched the history of the area and placed moments in time of what had been there at specific locations &#8230;.I called this <a href="http://www.xcp.bfn.org/hight.html" target="_blank">&#8220;narrative archaeology&#8221;</a> as it allowed places to be &#8220;read&#8221; by their augmentations&#8230;info that was of the place beyond the immediate experience (diff types of info) that otherwise would be lost or only found in books or web sites elsewhere. there now are locative narratives around the world but they need to be linked. From humble origins &#8220;narrative archaeology&#8221; went on to be recently named one of the 4 primary texts in locative media which is pretty amazing to me&#8230;but it is growing</strong></strong></p>
<p><strong><strong>- the limitations then were what I called the &#8220;bowling alley connundrum&#8221; &#8211; the specific data had to reset like pins&#8230;..and was isolated&#8230;.this led me to think about ar back then and up to now. How these could lead to much more from that point, data that would be more layered, variable, fluid..yet still augmented place and sense of place and social networking within data and software</strong></strong></p>
<p><strong><strong><a href="http://34n118w.net/34N/" target="_blank">lifeclipper</a> to me is a bridge</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>But Life Clipper is isolated from the internet currently is it?</p>
<p><strong><strong><span>Jeremy Hight: yes&#8230;ours was too.. that is what google wave makes possible.. our project only ran on our gear..in 4 blocks&#8230;with additional auxi</span>liary info online, and not malleable..but hey 2001 and all..</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>so the sites for 34 north 118 west are still active though?</p>
<p><strong>Jeremy Hight: oh yeah!</strong></p>
<p><strong><strong><strong>Tish Shute: </strong></strong></strong>nice I really like sound augmentation &#8211; have you seen <a href="http://www.soundwalk.com/blog/tag/augmented-reality/" target="_blank">Soundwalk</a>?</p>
<p><strong><strong><span>Jeremy Hight: yes, very cool..</span> </strong><strong>we chose sound only as it fought the power of image..instead caused a person to be in a sense of two places and times at once</strong></strong></p>
<p><strong><strong>Tish Shute:</strong></strong> and in 2001 that was definitely a visionary project!</p>
<p>You must be very excited that finally the pieces are coming together to make this stuff scale!</p>
<p><strong><strong><strong>Jeremy Hight:</strong> I can&#8217;t even tell you!! it is funny..i have known that this would come..just waited and waited&#8230;</strong></strong></p>
<p><strong><strong>..knew it needed the right people and tools..</strong></strong></p>
<p><strong><strong><span>..so the bowling alley connundrum led me to develop my project shortlisted for the iss (international space station) as I thought a lot about how points and works are not to be isolated&#8230;but connected and should be flowing in diff parts of a map&#8230;.to open up perspective and connected augmentations, but also to think about the map again&#8230;not as a base only. then moved into my work with new ways to visualize time and it all really began to gell. The ideas first were published as an essay</span></strong><span> </span><a id="qw.2" title="(http://www.fylkingen.se/hz/n8/hight.html)" href="http://www.fylkingen.se/hz/n8/hight.html"><span>(http://www.fylkingen.se/hz/n8/hight.html)</span></a><span> </span><strong><span>and later my project blog</span></strong><span> (</span><a id="bp.b" title="http://floatingpointsspace.blogspot.com/)" href="http://floatingpointsspace.blogspot.com/%29"><span>http://floatingpointsspace.blogspot.com/)</span></a></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>One thing I noticed when I was reading your paper is how you have been exploring non-euclidian geometries.Â  Could you explain how this is part of your idea of modulated mapping?</p>
<p><strong><strong><span>Jeremy Hight: Yes, this first came to me when my wife was reading to me from a book on the Poincare Conjecture and I was hit with a new way to measure events in time and after months of sketches, schematics and research came to see how it could also be connected to a geo-spatial web of projects and augmentations. It was published in the inaugural issue of Parsons School of Design&#8217;s Journal of Information Mapping which was an exciting fit.</span></strong><span><strong> I call it &#8220;Immersive Event Time&#8221;</strong>(</span><a id="o3rt" title="http://piim.newschool.edu/journal/issues/2009/01/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf)" href="http://piim.newschool.edu/journal/issues/2009/01/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf%29"><span>http://piim.newschool.edu/journal/issues/2009/01/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf)</span></a></strong></p>
<p><span><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_4cxz57xgv_b.jpg"><img class="alignnone size-medium wp-image-4634" title="dgznj3hp_4cxz57xgv_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_4cxz57xgv_b-195x300.jpg" alt="dgznj3hp_4cxz57xgv_b" width="195" height="300" /></a></strong></span></p>
<p><span><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_5g68k9ggh_b.jpg"><img class="alignnone size-medium wp-image-4635" title="dgznj3hp_5g68k9ggh_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dgznj3hp_5g68k9ggh_b-300x225.jpg" alt="dgznj3hp_5g68k9ggh_b" width="300" height="225" /></a><br />
</strong></span></p>
<p><strong><strong>so the last 3 years I have been working on how it could all work as channels of augmentation, and building and navigation as open and community in a sense as well as ai capability that was the time work especially. how time as experienced within an event is not a time &#8220;line&#8221; but points on and within a form&#8230;.and how this model is better for visualizing events in time and documenting them. it actually sprang from reading a book on the poincare conjecture, popped a bunch of other stuff together so one could visualize an event in time as like being in the belly of a whale..with time as the ribs..and our measure of time as the skin&#8230;and moving within it&#8230;.hoping this will be used as educational tool</strong></strong></p>
<p><strong><strong>and this also can be tied to ar and map again&#8230;how documentation of important events can be kept within icons on a google map..then download varying visualizations based on bandwidth and desired format</strong></strong></p>
<p><strong><strong><strong>Tish Shute: </strong></strong></strong>I have been thinking about the new forms of social interaction/agency that these kinds of augmentations of space/place/time will create. It seems there are two poles &#8211; one is the area Natalie Jeremijenko explores of shifting social relations from institutions/statistics to real time/location based/interactions and new forms of social agency. The other pole completely is more like the cloud based AI and perhaps crowd sourced machine learning.</p>
<p>Your ideas explore the possibilities of both these poles.Â  And certainly one of the big deals of distributed AR integrated with would be the possibilities it opened up both for new forms of networked social relationships and for new ways to draw on network effects.</p>
<p><strong><strong><strong>Jeremy Hight:</strong> and cross pollinations within &#8230;that is what my mind goes to</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>The other night I met Assaf Biderman, MIT, from the <a href="http://senseable.mit.edu/trashtrack/" target="_blank">Trash Track</a> team. Trash Track doesn&#8217;t utilize AR but I could see that there are possibilites there.<br />
What do you think?</p>
<p><strong><strong><span>Jeremy Hight: yes, absolutely,</span> </strong><strong>there can be sort of skins on locations that user end selection can yield &#8230;like channels of place&#8230;.and can range from pragmatic core to art and play and places between&#8230;.how this recalibrates the semiotics of map&#8230;more than just augmentation as seen as a kind of piggy back on map..map becomes interface and defanged platform if you will, interestingly my more poetic/philosophic writing led me here too</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong></strong></strong> I know they are at very different poles of the system but I do wonder how AR can bring some of the level of social agency/interaction that <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko</a> works on into a productive interaction with the kind of innovations in Machine learning that Dolores Lab style machine learning!! and others are pioneering?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> Natalie&#8217;s genius to me is in practical functional tech that also opens deeper questions and even new openings of what is needed..amazing layers in her work that way.. succint yet deep..very deep</strong></strong></p>
<p><strong><strong><strong>Tish Shute: </strong></strong></strong>Yes &#8211; I a just writing a post about her work &#8211; I find it deeply moving the way she has delved into the possibilities to using technology to open us up to our world.Â  One of the reasons I find distributed AR so interesting is because it will make it possible for all kinds of people to create and use augmentation in their lives and communities.</p>
<p>So to return to how a distributed AR framework could contribute to a project like Trash Track?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> what about using it for community, dissent and awareness raising then? Like Natalie&#8217;s work but building like a communal work of multiple points, like the old adage of the elephant and the blind men sorry..metaphor &#8211; like one of my points in immersive sight was how one could take augmentation as multiple works sort of turning the faces of a thing or place&#8230;and how this would make a larger work even in such a flow so people moving in a space could also build..</strong></strong></p>
<p><strong><strong>what of ar traces left as people move calibrated to user traffic and trash as estimated in an urban space&#8230;like it goes back to chris burden in the 70&#8242;s making you know that as you turn the turnstyle you are drilling into the foundation and may be the one that collapses the building?</strong></strong></p>
<p><strong><strong>so their movements leave trash. Natalie is all about raising awareness to cause and effect and data, space and ecology. love that. So maybe &#8230;<br />
a feedback loop , artifact and user end responsibility can leave traces &#8230;trash&#8230;</strong></strong></p>
<p><strong><strong>.. cybernetics vs ecology and human waste</strong></strong></p>
<p><strong><strong><strong>Tish Shute: </strong></strong></strong>could you elaborate?</p>
<p><strong><strong><strong>Jeremy Hight:</strong> brain fart&#8230;that the mass of trash people leave is a piece at a time&#8230;.and how like the space shuttle mission when it was argued first true cybernaut occurred&#8230;.one cord to air for astronaut..one for computer on their back to fix broken bay arm&#8230;if there is a way to build on that and in relation to the topic&#8230;..how this can go further, that machines do not waste as much&#8230;as ar is a means to cybernetic raise awareness..eh..</strong><strong><span>In a sense it is like the space shuttle mission when arguably the first true cybernaut occurred&#8230;.one cord to air for astronaut..one for computer on their back to fix broken bay arm&#8230;if there is a way to build on that and in relation to the topic&#8230;..how this can go further, that machines do not waste as much&#8230;as ar is a means to cybernetic raise awareness..eh.. hmmm.</span>.. </strong><strong> sensors etc&#8230;wearables too &#8211; could be eco awareness with data and machine and human</strong></strong></p>
<p><strong><strong>what about a cloud computing system with a slight ai in the sense of intuitive word cloud and interest scans&#8230;..so as one moves through say new york they can be offered new ai data and services as they move? could also be of eco interests? concerns about urban farming, eco waste, air pollution etc&#8230;.perhaps with (jeremijenko element here) sensors placed in locations and these also giving data reads in public areas with no input but hard data itself&#8230;&#8230;hmm..could be interesting</strong></strong></p>
<p><strong><strong>it can also give info of the carbon footprints (estimated prob unless data is public record somehow) of chain businesses and data on which are more eco friendly as well as an iconography color coded and icon coded to the best places to go to support greening and eco friendly business? And the companies could promote themselves on this service to attract eco aware customers who would be seeing them as kindred spirits and helping the<br />
larger effort?</strong></strong></p>
<p><strong><strong>kind of eco mapping..and ar on mobile app</strong></strong></p>
<p><strong><strong>what about sensors that read air pollution levels, levels of solar radiation (to aid with skin protection in shifting light values in a city space..ie put on some skin cream now&#8230;), light sensors that detect density and over density in public spaces&#8230;to use the old trope in art of reading crowds in a space..but instead could indicate overcrowding, failing infrastructure in public spaces (which is a congestion that leads to greater pollution levels as well as flaws in city planning over time..), and perhaps a tie in to wearables&#8230;&#8230;worn sensors on smart clothes&#8230;.this could form a node network of people in the crowds &#8230;.and also send data within moving in a space&#8230;</strong></strong></p>
<p><strong><strong>here is a kooky thought&#8230; what of taking the computing power and data of people moving in a space..and not only get eco data and make available to them levels of<br />
data..but make possibly a roving super computer&#8230;crunching the deeper data of people open to this&#8230;&#8230;a hive crunching deeper analysis of the space, scan properties from sensors, and even a game theory esque algorithm of meta data if say 40 people out of 50 hit on a certain spike or reading&#8230;and even their input&#8230;..I worked in game theory for paleontology in this manner for a time as a teen&#8230;.a private project&#8230;&#8230; the reading can lead to a sort of meta read by what hits most consistently..as well as in their input..text of what they experienced, observed, postulated, analyzed even&#8230;. this could be really interesting&#8230;even if just the last part from collected data and not from any complex branching of servers..</strong></strong></p>
<p><strong><strong>I thought at 19 or so that the flaw in paleontology was in how so many larger theories were shifting exhibitions and larger senses of things like were there pre-historic birds that were mistaken for amphibean and then back again&#8230;.so why not make a computer program and feed all the papers published into it and see what hits were counted in terms of an emerging meta theory&#8230;and landscape of key points being agreed upon&#8230;this data would be in a sense both algorithmic and a sort of unspoken dialogue &#8230;came from a lot of study of game theory one summer&#8230;</strong></strong></p>
<p><strong><strong>hope this makes some sense&#8230;I forgot to mention that I originally planned to be a research meteorologist and my plan in middle school or so was to get a phd and develop new software to have a global map and then run models of hypothetical storms across it in real time animations of cloud forms, radar and wind analysis/fields, barometric pressure spaghetti charts etc&#8230;.and to also do 3d cut away models of storm architectures&#8230;so been into visualizations of complex data and mapping for a long time!</strong></strong></p>
<p><strong><strong><strong>Tish Shute:</strong> </strong></strong>Wow let me think about this one!</p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/feed/</wfw:commentRss>
		<slash:comments>18</slash:comments>
		</item>
		<item>
		<title>Total Immersion and the &#8220;Transfigured City:&#8221; Shared Augmented Realities, the &#8220;Web Squared Era,&#8221; and Google Wave</title>
		<link>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/</link>
		<comments>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/#comments</comments>
		<pubDate>Sun, 27 Sep 2009 04:42:42 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[3D Interactive Live Show]]></category>
		<category><![CDATA[Acrossair]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[Amphibious Architecture]]></category>
		<category><![CDATA[anime]]></category>
		<category><![CDATA[Apple iPhone]]></category>
		<category><![CDATA[AR baseball cards for Topps]]></category>
		<category><![CDATA[AR Consortium]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR goggles]]></category>
		<category><![CDATA[Architectural League of New York]]></category>
		<category><![CDATA[ARML]]></category>
		<category><![CDATA[ARN]]></category>
		<category><![CDATA[Augmented City]]></category>
		<category><![CDATA[augmented city lab]]></category>
		<category><![CDATA[augmented reality books]]></category>
		<category><![CDATA[augmented reality entrpreneurship]]></category>
		<category><![CDATA[augmented reality goggles]]></category>
		<category><![CDATA[augmented reality making visible the invisible]]></category>
		<category><![CDATA[augmented reality mark-up language]]></category>
		<category><![CDATA[augmented reality pollution meter]]></category>
		<category><![CDATA[augmented reality standards]]></category>
		<category><![CDATA[augmented reality toys]]></category>
		<category><![CDATA[augmented virtuality]]></category>
		<category><![CDATA[Bionic Eye]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Bruno Uzzan]]></category>
		<category><![CDATA[Conflux]]></category>
		<category><![CDATA[cross platform compatibility for augmented reality]]></category>
		<category><![CDATA[D'Fusion]]></category>
		<category><![CDATA[Daniel Wagner]]></category>
		<category><![CDATA[Denno Coil]]></category>
		<category><![CDATA[distributed]]></category>
		<category><![CDATA[elements of networked urbanism]]></category>
		<category><![CDATA[Elizabeth Goodman]]></category>
		<category><![CDATA[everyware]]></category>
		<category><![CDATA[Fish 'n Microchips]]></category>
		<category><![CDATA[Flickr]]></category>
		<category><![CDATA[Gavin Starks]]></category>
		<category><![CDATA[Gene Becker]]></category>
		<category><![CDATA[geo spatial web]]></category>
		<category><![CDATA[geoAR]]></category>
		<category><![CDATA[geoaugmentation]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Google Wave Protocol]]></category>
		<category><![CDATA[Gov 2.0 Expo Showcase]]></category>
		<category><![CDATA[Gov 2.0 Summit]]></category>
		<category><![CDATA[Graz University of Technology]]></category>
		<category><![CDATA[Imagination]]></category>
		<category><![CDATA[Incheon Free Economic Zone]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[Int13]]></category>
		<category><![CDATA[Interaction Design for Augmented Reality]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[Jeremy Hight]]></category>
		<category><![CDATA[Joe Lamantia]]></category>
		<category><![CDATA[Jonathan Laventhol]]></category>
		<category><![CDATA[Korea's u-Cities]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Layar 3D]]></category>
		<category><![CDATA[magic lens augmented reality]]></category>
		<category><![CDATA[manga]]></category>
		<category><![CDATA[Mark Shepard]]></category>
		<category><![CDATA[Mark Weiser]]></category>
		<category><![CDATA[markerless mobile augmented reality]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Microsoft Bing]]></category>
		<category><![CDATA[Mike Kuniavsky]]></category>
		<category><![CDATA[Mobilizy]]></category>
		<category><![CDATA[multiuser augmented reality]]></category>
		<category><![CDATA[Natalie Jeremijenko]]></category>
		<category><![CDATA[Natural Fuse]]></category>
		<category><![CDATA[near-field object rcognition and tracking]]></category>
		<category><![CDATA[Networked City]]></category>
		<category><![CDATA[networked urbanism]]></category>
		<category><![CDATA[newer urbanism]]></category>
		<category><![CDATA[open]]></category>
		<category><![CDATA[open augmented reality framework]]></category>
		<category><![CDATA[open augmented reality network]]></category>
		<category><![CDATA[Orange Cone]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[realtime panorama mapping on mobile phones]]></category>
		<category><![CDATA[RobotVision]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Sentient City Survival Kit]]></category>
		<category><![CDATA[Shangri La]]></category>
		<category><![CDATA[shared augmented realities]]></category>
		<category><![CDATA[Sky Writer]]></category>
		<category><![CDATA[Steven Feiner]]></category>
		<category><![CDATA[symbiosis between augmented reality and brands]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the LAN of things]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[the web squared era]]></category>
		<category><![CDATA[ThingM]]></category>
		<category><![CDATA[things as services]]></category>
		<category><![CDATA[Thomas Wrobel]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[Tod E. Kurt]]></category>
		<category><![CDATA[Total Immersion]]></category>
		<category><![CDATA[Toward the Sentient City]]></category>
		<category><![CDATA[Transfigured City]]></category>
		<category><![CDATA[twitter]]></category>
		<category><![CDATA[u-City]]></category>
		<category><![CDATA[ubiquitous computing and augmented reality]]></category>
		<category><![CDATA[uCity]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Weisarian Ubiquitous Computing]]></category>
		<category><![CDATA[Wikitude]]></category>
		<category><![CDATA[xClinic]]></category>
		<category><![CDATA[XMPP versus HTTP]]></category>
		<category><![CDATA[Yocahi Benkler]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4439</guid>
		<description><![CDATA[Above is an image from Total Immersion&#8217;s augmented reality experience developed for the &#8220;Networked City&#8221; exhibition in South Korea, &#8211; &#8220;a fun scenario created for a u-City&#8217;s infrastructure and city management service&#8221; &#8220;To the naked eye, the exhibit looks like a bare bones model of a city. But when visitors put on the special [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_338cwpzntgp_b.jpg"><img class="alignnone size-medium wp-image-4440" title="dhj5mk2g_338cwpzntgp_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_338cwpzntgp_b-300x170.jpg" alt="dhj5mk2g_338cwpzntgp_b" width="300" height="170" /></a></p>
<p><em>Above is an image from <a href="http://www.t-immersion.com/" target="_blank">Total Immersion&#8217;s</a> augmented reality experience developed for the <a id="winm" title="&quot;Networked City&quot; exhibition in South Korea, &quot;" href="http://www.tomorrowcity.or.kr/sv_web/en_US/space.SpaceInfo.web?targetMethod=DoUe04Sub1" target="_blank">&#8220;Networked City&#8221; exhibition in South Korea,</a> &#8211; &#8220;a fun scenario created for a<a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank"> u-City&#8217;s</a> infrastructure and city management service&#8221; </em></p>
<p><strong>&#8220;To the naked eye, the exhibit looks like a bare bones model of a city. But when visitors put on the special AR goggles a whole new world unfolds &#8211; as graphics overlaid on the city model.</strong><em><strong>&#8221; </strong>(<a href="http://gamesalfresco.com/2009/09/14/total-immersion-brings-augmented-reality-to-tomorowcity-todaytomorrow/" target="_blank">Games Alfresco)</a></em></p>
<p>&#8220;The Networked City,&#8221; is a large scale augmented virtuality of a scenario for a networked city. But my guess, reading the <em><a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">Korea IT Times</a></em>, is the plan is to move from an augmented virtuality to an augmented reality as Incheon Free Economic Zone (IFEZ) realizes its vision to become a leading u-City &#8211; where reality is turned &#8220;inside out&#8221; (see <a id="x:2w" title="Inside Out Reality" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php">Inside Out: Interaction Design for Augmented Reality</a>). If you are not familiar with South Korea&#8217;s u-Cities, <a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">check out this post</a> for a short primer (and note<a href="http://www.google.com/trends?q=augmented+reality&amp;ctab=1986817859&amp;geo=all&amp;date=all" target="_blank"> Google Trends search on Augmented Reality </a>shows South Korea leaving everyone else in the dust).</p>
<h3>Ubiquitous computing and augmented reality are like adenine and thymine &#8211; a DNA base pair.</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-24-at-11.34.35-PM.png"><img class="alignnone size-medium wp-image-4442" title="Screen shot 2009-09-24 at 11.34.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-24-at-11.34.35-PM-300x256.png" alt="Screen shot 2009-09-24 at 11.34.35 PM" width="300" height="256" /></a></p>
<p><em>A sky view of Incheon Free Economic Zone (<a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">from Korean IT Times</a>). For more on the IFEZ vision to become a leading u-City <a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">see here</a>.</em></p>
<p><a href="http://www.koreaittimes.com/story/4371/leading-global-u-city" target="_blank">Korea IT Times</a> writes about the u-city concept:</p>
<p><strong>&#8220;Korea began using the term u-City after accepting the concept of ubiquitous computing, a post-desktop model of human-computer interaction created by Mark Weiser, the chief technologist of the Xerox Palo Alto Research Center in California, in 1998. There have been a lot of research in this field since 2002. As a result, many local governments in Korea have applied this concept to various development projectsÂ since 2005Â based on a practical approach to it.&#8221;</strong></p>
<p>The back story to many of my recent posts, including this one, is an understanding of a relationship between ubiquitous computing and augmented reality that emerged, for me, in a February conversation with Adam Greenfield, <a title="Permanent Link to Towards a Newer Urbanism: Talking Cities, Networks, and Publics with Adam Greenfield" rel="bookmark" href="../../2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/">Towards a Newer Urbanism: Talking Cities, Networks, and Publics with Adam Greenfield</a>. In case you missed it, here is the link again because I think it holds up very well considering the rapid developments of recent months. Also, importantly for this post, it includes a discussion of moving on from Weiserian visions.</p>
<p><a href="http://speedbird.wordpress.com/" target="_blank">Adam Greenfield&#8217;s Speedbird</a> is one of my key sources for understanding &#8220;networked urbanism,&#8221; and the list he makes of <a href="http://speedbird.wordpress.com/2009/03/22/the-elements-of-networked-urbanism/" target="_blank">the elements of networked urbanism here</a> (also see the comments) &#8211; is my mantra for thinking about the DNA base pair relationship of augmented reality and ubiquitous computing.</p>
<p>Adam Greenfield&#8217;s, <strong>&#8220;summary of what those of us who are thinking, writing and speaking about networked urbanism seem to be seeing&#8221;</strong> is:</p>
<p><strong>1. From <em>latent</em> to <em>explicit</em>; 2. From <em>browse</em> to <em>search</em>; 3. From <em>held</em> to <em>shared</em>; 4. From <em>expiring</em> to <em>persistent</em>; 5. From <em>deferred</em> to <em>real-time</em>; 6. From <em>passive</em> to <em>interactive</em>; 7. From <em>component</em> to <em>resource</em>; 8. From <em>constant</em> to <em>variable</em>; 9. From <em>wayfinding</em> to <em>wayshowing</em>; 10. From <em>object</em> to <em>service</em>; 11. From <em>vehicle</em> to <em>mobility</em>; 12. From <em>community</em> to <em>social network</em>; 13. From <em>ownership</em> to <em>use</em>; 14. From <em>consumer</em> to <em>constituent</em>.</strong></p>
<p><strong></p>
<p></strong></p>
<h3>Augmented Reality &#8211; Making Visible the Invisible</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-2.44.27-PM.png"><img class="alignnone size-medium wp-image-4509" title="Screen shot 2009-09-26 at 2.44.27 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-2.44.27-PM-300x229.png" alt="Screen shot 2009-09-26 at 2.44.27 PM" width="300" height="229" /></a></p>
<p>The screenshot above is one ofÂ  the coolest &#8220;making visible the invisible&#8221; AR applications. It was developed at Columbia University Graphics and User Interface Lab where <a href="http://www1.cs.columbia.edu/%7Efeiner/" target="_blank">Steven Feiner</a> is Director (see the deep list of projects from the lab <a href="http://graphics.cs.columbia.edu/top.html" target="_blank">here</a>).Â  This app &#8220;shows carbon monoxide levels projected over New York City. The height of each ball reflects concentrations of the pollutant.&#8221; Credit: Sean White and Steven FeinerÂ  (<a href="http://www.technologyreview.com/computing/23515/page2/" target="_blank">via Technology Review</a>).</p>
<p>The recent emergence of &#8220;magic lens&#8221; augmented reality apps for our smart phones &#8211; <a href="http://www.wikitude.org/" target="_blank">Wikitude</a>, <a href="http://layar.com/" target="_blank">Layar,</a> <a href="http://www.acrossair.com/" target="_blank">Acrossair</a>, <a href="http://support.sekaicamera.com/">Sekai Camera</a>, and many others now, have given us a new window into our cities. But we are yet to realize the full potential of the AR/ubicomp base pair that can &#8220;make visible the invisible&#8221; and give us new opportunities to relate to the invisible data ecosystems of our cities, not merely as a spectator experience,Â  but as an interactive, in context, real time opportunity to reimagine social relations.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=3" target="_blank">Mark Shepard</a> says in <a href="http://www.sentientcity.net/exhibit/?p=3" target="_blank">his curatorial statement</a> for, <a href="http://www.sentientcity.net/exhibit/" target="_blank">&#8220;Toward the Sentient City:&#8221;</a> (Much more soon on this very significant exhibit which runs from Sept. 17th to Nov. 7th, 2009.)</p>
<p><strong>&#8220;In place of natural weather systems, however, today we find the dataclouds of 21st century urban space increasingly shaping our experience of this city and the choices we make there.&#8221;</strong></p>
<p>Augmented Reality, as Joe Lamantia points out, is becoming the great &#8220;<a id="o0mh" title="ambassador of ubiquitous computing" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php">ambassador of ubiquitous computing</a>.&#8221; AR is &#8220;<strong>&#8230;mak[ing] it possible to experience the new world of ubiquitous computing by reifying the digital layer that permeates our inside-out world,&#8221; </strong>and we are only just glimpsing the razor thin end of the wedge in this regard.</p>
<p>I am still working on my <a href="http://www.gov2summit.com/" target="_blank">Gov 2.0 Summit </a>write upÂ  and, amongst other things, I will talk about how an emerging new social contract around open data, here in the US,Â  will put augmented realityÂ  apps center stageÂ  &#8211; &#8220;doing stuff that matters.&#8221; At <a href="http://www.gov2expo.com/gov2expo2009" target="_blank">Gov 2.0 Expo Showcase</a> Tim O&#8217;Reilly tweeted:</p>
<p><a id="i23q" title="Tim O'Reilly" href="http://twitter.com/timoreilly">Tim O&#8217;Reilly</a> Really enjoyed @capttaco (Digital Arch Design) @ #gov20e: &#8220;Augmented Reality could be a new public infrastructure&#8221; <a href="http://bit.ly/18iCx" target="_blank">http://bit.ly/18iCx</a></p>
<p>Also see Tim O&#8217;Reilly and Jennifer Pahlka on Forbes.com discuss the <a href="http://www.forbes.com/2009/09/23/web-squared-oreilly-technology-breakthroughs-web2point0.html" target="_blank">The &#8220;Web Squared&#8221; Era</a> -Â <strong> &#8220;the Web Squared era is an era of augmented reality arriving (like the sensor revolution) stealthily, in more pedestrian clothes than we expected</strong>.<strong>&#8230; &#8230;our world will have &#8220;<a href="http://www.orangecone.com/archives/2009/02/smart_things_an.html" target="_blank">information shadows</a>.&#8221; Augmented reality amounts to information shadows made visible.&#8221;</strong></p>
<p>Again there is back story to how I came to think about Information Shadows in relation to augmented reality. So in case you missed it the first time, here is the link to a conversation that began in a hallway meeting between Tim O&#8217;Reilly, Mike Kuniavsky, <a href="http://thingm.com/" target="_blank">ThingM</a>, Usman Haque, <a href="http://www.pachube.com/" target="_blank">Pachube</a>, and Gavin Starks, <a href="http://www.amee.com/" target="_blank">AMEE</a>, at <a href="http://en.oreilly.com/et2009/" target="_blank">ETech earlier this year</a>, <a title="Permanent Link to Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009" rel="bookmark" href="../../2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/">&#8220;Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009</a>.&#8221;</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.32.09-PM.png"><img class="alignnone size-medium wp-image-4547" title="Screen shot 2009-09-26 at 9.32.09 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.32.09-PM-300x225.png" alt="Screen shot 2009-09-26 at 9.32.09 PM" width="300" height="225" /></a></p>
<p><a href="http://www.slideshare.net/rlenz/augmented-city-lab-picnic-09" target="_blank">Slide from Augmented City Lab</a> @ <a href="http://www.picnicnetwork.org/" target="_blank">Picnic &#8217;09</a></p>
<h3>So What&#8217;s Next for Mobile Augmented Reality?</h3>
<p><a href="http://www.youtube.com/watch?v=434zw201iN8&amp;feature=player_embedded" target="_blank"><img class="alignnone size-medium wp-image-4513" title="Screen shot 2009-09-26 at 3.45.45 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-3.45.45-PM-300x186.png" alt="Screen shot 2009-09-26 at 3.45.45 PM" width="300" height="186" /></a></p>
<p>These videos from Daniel Wagner&#8217;s team from Graz University of Technology showing <a href="http://www.youtube.com/watch?v=434zw201iN8&amp;feature=player_embedded" target="_blank">Realtime Panorama Mapping and Tracking on Mobile Phones</a> and <a href="http://www.youtube.com/watch?v=W-mJG3peIXA&amp;feature=player_embedded" target="_blank">Creating an Indoor Panorama in Realtime</a>, as Rouli from Games Alfresco points out,Â  indicate that there is a lot in store for us at <a href="http://www.icg.tugraz.at/Members/daniel/MultipleTargetDetectionAndTrackingWithGuaranteedFrameratesOnMobilePhones/inproceedings_view">ISMAR09</a>.</p>
<p>We may not be so impressed by directory style/&#8220;post it&#8221; AR anymore, as these applications have become commonplace so quickly! But while these early mobile AR apps may be disappointing in relation to some futurist visions of AR &#8211; merely AR/ubicomp appetizers, there are still good implementations of this model coming out (see newcomers to the app store<a id="tzvf" title="Bionic Eye" href="http://mashable.com/2009/09/24/bionic-eye/" target="_blank"> Bionic Eye</a> and <a href="http://www.readwriteweb.com/archives/robotvision_a_bing-powered_iphone_augmented_realit.php" target="_blank">RobotVision</a>). And <a href="http://layar.com/" target="_blank">Layar,</a> always on the ball, has upped the ante for the new cohort of AR Browsers with <a href="http://layar.com/3d/" target="_blank">Layar 3D</a>.</p>
<p>But as Bruce Sterling <a href="http://www.wired.com/beyond_the_beyond/2009/09/augmented-reality-robotvision/" target="_blank">notes here</a>:</p>
<p><strong>*In AR, everybody wants to be the platform and the browser, and nobody wants to be the boring old geolocative database. Look how Tim [creator of RobotVision] here, who is like one guy working on his weekends, can boldly fold-in the multi-billion dollar, multi-million user empires of Apple iPhone, Microsoft Bing, Flickr, and Twitter, all under his right thumb</strong></p>
<p> (watch <a id="qxek" title="video here" href="http://www.youtube.com/watch?v=hWC9gax7SCA&amp;feature=player_embedded">video here</a>)</p>
<p>But if you are looking for something more from AR, you probably won&#8217;t have to wait too long. The two pioneering companies in AR, <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> &#8211; founded in 1999, and <a href="http://www.metaio.com/" target="_blank">Metaio</a> &#8211; founded in 2003 are both coming out with &#8220;mobile augmented reality platforms&#8221; in a matter of weeks (see press releases <a href="http://augmented-reality-news.com/2009/09/14/bringing-its-augmented-reality-to-mobile-applications-total-immersion-partners-with-smartphones-app-provider-int13/" target="_blank">here</a> and <a href="http://gamesalfresco.com/2009/09/18/metaio-announcing-mobile-augmented-reality-platform-junaio/" target="_blank">here</a>). And both companies, it seems, will deploy much more sophisticated AR rendering and tracking than we have seen to date.</p>
<p>I approached Bruno Uzzan, founder and CEO of Total Immersion, for an interview as part of my look at the new industry of augmented reality through the eyes of the founding members of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a>. These consortium members are some of the first commercial augmented reality companies.</p>
<p><a href="#jumpto">The interview below</a> with Bruno began early this summer and then we both went on vacation and it picks up after the announcement of the <a href="http://www.int13.net/blog/en/" target="_blank">partnership between Total Immersion and Int13</a>.</p>
<p>The significance of this announcement is that Total Immersion is now positioned to take the augmented reality experiences they have developed for a number of top brands onto multiple mobile platforms with, &#8220;<strong>Int13&#8242;s very clever embedded solution that allows our [Total Immersion's] solutions to work across many [mobile] platforms,&#8221; </strong>while Int13 gets to extend their reach.</p>
<p>Total Immersion has a 50 person R&amp;D team and their two main focuses have been, firstly getting:<strong> </strong></p>
<p><strong>&#8220;Augmented Reality to work with as many platforms as possible &#8211; PC, Mac, Mobile, Game Consoles, all those are the platforms that we are targeting. We are currently doing lot of work in the R &amp; D team in cross platform compatibility&#8230;.&#8221;</strong></p>
<p>and, secondly:<strong></p>
<p></strong></p>
<p><strong>&#8220;Our R&amp;D guys are working on the real world interacting more with the virtual world.Â  And I have started seeing some results which are pretty much crazy and this will be ready for next year.&#8221;</strong></p>
<p><strong></p>
<p></strong></p>
<h3>Pandora&#8217;s Box &#8211; Shared Augmented Realities</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-1.18.15-AM.png"><img class="alignnone size-medium wp-image-4450" title="Screen shot 2009-09-25 at 1.18.15 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-1.18.15-AM-186x300.png" alt="Screen shot 2009-09-25 at 1.18.15 AM" width="186" height="300" /></a></p>
<p>Spes or &#8220;Hope&#8221;; <a title="Engraving" href="http://en.wikipedia.org/wiki/Engraving">engraving</a> by <a title="Sebald Beham" href="http://en.wikipedia.org/wiki/Sebald_Beham">Sebald Beham</a>, German c1540 (see <a href="http://en.wikipedia.org/wiki/Pandora%27s_box" target="_blank">Wikipedia article on Pandora&#8217;s Box</a>)</p>
<p>There are many weaknesses to the mobile smart phone AR experiences we have now, and the lack of near field object recognition (to date), and difficulties with accurate positioning aren&#8217;t the only ones. Note re solving positioning problems in mobile AR, we are yet to see AR leverage public libraries for analyzing scenes like Flickr&#8217;s geo tagged photos, see Aaron Straup Cope&#8217;s work on <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">&#8220;The Shape of Alpha.&#8221;</a> And for more on this, see <a href="http://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/" target="_blank">my post here</a>.</p>
<p>But, as Joe Lamantia points out:</p>
<p><strong>&#8220;One of the weakest aspects of the existing interaction patterns for augmented reality is their reliance on single-person, socially disconnected user experiences.&#8221;</strong></p>
<p>In my view, <strong>The Pandora&#8217;s Box of Augmented Realities</strong> is an open, distributed, multiuser augmented reality framework, fully integrated with the internet and world wide web.</p>
<p>As Yochai Benkler has pointed out many times, and argues again in, <a href="Capital, Power, and the Next Step in Decentralization" target="_blank">Capital, Power, and the Next Step in Decentralization</a>, it is &#8220;open, collaborative, distributed practices that have been at the core of what made the Internet.&#8221;Â  We have to try to make sure that open, collaborative, distributed practices are at the core of mobile augmented reality.</p>
<p><strong></p>
<p></strong></p>
<h3>Can Google Wave be the basis for an Open, Distributed, Multiuser Augmented Reality Framework?</h3>
<p><a href="http://www.lostagain.nl/tempspace/PrototypeDiagram.html" target="_blank"><img class="alignnone size-medium wp-image-4492" title="Screen shot 2009-09-25 at 11.51.20 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-25-at-11.51.20-PM-300x141.png" alt="Screen shot 2009-09-25 at 11.51.20 PM" width="300" height="141" /></a></p>
<p>I have been exploring the idea of using <a href="http://wave.google.com/" target="_blank">Google Wave </a>protocol as the basis for a distributed, multiuser open augmented reality framework with a small group of AR enthusiasts and developers. And I am happy to say the proposal is beginning to get fleshed out a little.Â  New collaborators are welcome both for &#8220;gear heady&#8221; input and use case suggestions (but re the latter, you can&#8217;t just say everything you see in <a href="http://en.wikipedia.org/wiki/Denn%C5%8D_Coil" target="_blank">Denno Coil</a>..!).</p>
<p>This effort started with Thomas Wrobel&#8217;sÂ  proposal for an Open AR Framework prototyped on IRC &#8211; see <a id="s336" title="here" href="../../2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/">here,</a> and click to enlarge the image above of, <a href="http://www.lostagain.nl/tempspace/PrototypeDiagram.html" target="_blank">&#8220;Sky Writer: Basic Concept for an Open Multi-source AR Framework.&#8221;</a></p>
<p>But recently we began looking at the <a href="http://www.waveprotocol.org/" target="_blank">Wave Federation Protocol</a>. And, if you check out <a id="ogbq" title="this post," href="http://www.jasonkolb.com/weblog/2009/09/why-google-wave-is-the-coolest-thing-since-sliced-bread.html#more" target="_blank">this post,</a> and <a id="c0ep" title="this post" href="http://reuvencohen.sys-con.com/node/980762" target="_blank">this post</a>, you may get a glimpse of why Google Wave protocol might be a good basis for an open, distributed, AR Framework. You will notice, if you study what Google Wave has done with the XMPP protocol, that many of <a href="http://speedbird.wordpress.com/2009/03/22/the-elements-of-networked-urbanism/" target="_blank"> the elements of networked urbanism</a> that Adam Greenfield describes resonate strongly with what is being attempted in Wave.</p>
<p>But enough said for now!Â  Regardless of the details of implementation,Â  Google Wave or an AR protocol built from scratch (phew! the latter does seem like a lot of work) -Â  an open, distributed, multiuser AR framework integrated with the internet and web would explode the potential of AR, creating new possibilities for data flows, mashups ,and shared augmented realities.</p>
<p>And we are excited by Google Wave because, as Thomas puts it:</p>
<p><strong>&#8220;The really great thing wave does &#8230;.(aside from being an open standard backed by a major player&#8230;hopefully leading to thousands of worldwide servers )&#8230;.is that it allows anyone to create any number of waves, set precisely who can view or edit them, and for them to be able to be updated quickly and continuously (and even simultaneously!)</strong><strong> Better yet, changes will (if necessary) propagate to all the other servers sharing that wave. It does all this right now. From my eyes this does a lot of the work of an AR infrastructure already.</strong></p>
<p><strong>I cant see any other protocol actually doing anything like this at the moment, although correct me if I&#8217;m wrong, as alternatives are always welcome :)&#8221;</strong></p>
<p>Also, Thomas notes, <strong>&#8220;even the playback system (that is, the ability to playback the changes made to a wave since its creation) &#8230;this could give us automatically some of the ideas Jeremy Hight has mentioned in <a href="http://piim.newschool.edu/journal/issues/2009/01/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf" target="_blank">his visionary work here</a>,Â  and <a href="http://piim.newschool.edu/journal/issues/2009/02/pdfs/ParsonsJournalForInformationMapping_Hight-Jeremy.pdf" target="_blank">here</a> on &#8220;the geo spatial web, interlinked locations and data, immersive augmentation and open source geo augmentation.&#8221;</strong></p>
<p>One of the many reasons why an Open, distributed AR Framework would be so cool is it would open up all kinds of possibilities for <span>GeoAR</span> by providing the over-arching standard protocol for communication of updates necessary for the substandards that will facilitate <span>GeoAR</span>.</p>
<p>Also important to note is theÂ  <a id="o0is" title="Wave Federation Protocol docs which are all publicly available here" href="http://www.waveprotocol.org/" target="_blank">Wave Federation Protocol</a> allows anyone:</p>
<p><strong>&#8220;to run wave servers and become wave providers, for themselves, or as services for their users, and to &#8220;federate&#8221; waves, that is, to share waves with each other and with Google Wave. &#8211; &#8220;the federation gateway and a federation proxy and is based on open extension to <a href="http://www.waveprotocol.org/draft-protocol-spec#RFC3920">XMPP core</a> [RFC3920] protocol to allow near real-time communication between two wave servers.&#8221; See Reuven Cohen&#8217;s blog for more <a id="rmr3" title="here" href="http://reuvencohen.sys-con.com/node/980762" target="_blank">here</a> and <a id="mqxr" title="&quot;HTTP is Dead, Long Live the Real Time Cloud.&quot;" href="http://www.elasticvapor.com/2009/05/http-is-dead-long-live-realtime-cloud.html" target="_blank">here, &#8220;HTTP is Dead, Long Live the Real Time Cloud.&#8221;</a></strong></p>
<p>Still some people have expressed concern that an AR Framework using Google Wave protocol would give Google disproportionate influence. Â  Will Google-specific functionality be an issue?Â  How much stuff is Google specific just because no one else is using it (yet)? And how much is Google specific because it holds no value to anyone else but Google? These are some of the questions that have come up.</p>
<p>You are going to see a variety of suggestions for standards and specs for open AR coming out out in the next few months which as, Robert Rice of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a> points out is: <strong>&#8220;a good thing, we need that competition early on to settle down on best case.&#8221; </strong>Recently,Â <a href="http://www.mobilizy.com/" target="_blank"> Mobilizy</a> have offered up an ARML (&#8220;an augmented reality mark-up language specification based on the OpenGISÂ® KML Encoding Standard (OGC KML) with extensions&#8221;) for consideration see <a href="http://www.mobilizy.com/enpress-release-mobilizy-proposes-arml" target="_blank">here.</a></p>
<p>So it is, perhaps, also important to note, that an Open AR Framework should be neutral/transparent to techniques ofÂ  &#8220;reality recognition,&#8221;Â  and methodologies of registration/tracking, allowing various ones to work on the system as new techniques evolve, and to support as many evolving standards as possible.</p>
<p>Augmented Reality developers, like Total Immersion and others with powerful rendering/tracking AR software, should be able to use an Open AR Framework to exchange the data which their tracking will use. And the tracking/rendering problems they and other researchers have solved are much harder than figuring out data exchange on a standard infrastructure or protocol!</p>
<p>So I pricked up my ears when I heard Bruno Uzzan, CEO of <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> -Â  the first and currently the largest augmented reality company, with a 50 person R&amp;D team in France and offices in LA, where Bruno himself is now based, say: <strong>&#8220;Total Immersion isÂ  only months away from launching shared mobile augmented reality experiences using near field object recognition/tracking across multiple platforms&#8221;</strong> (for more details read my conversation with Bruno Uzzan <a href="#jumpto">below</a>).</p>
<p>I was happy when I asked Bruno about the possibilities for developing an open, distributed, multiuser augmented reality framework fully integrated with the internet and world wide web (possibly using Google Wave protocols), and he replied:</p>
<p><span id="pnk:" title="Click to view full content"><strong>&#8220;I think this is feasible. I think that&#8217;s doable, that&#8217;s justÂ  in my opinion. I mean some people might have another kind of opinion but I think that that&#8217;s definitely doable.&#8221;</strong></span></p>
<p><span title="Click to view full content"><strong></p>
<p></strong></span></p>
<h3>Total Immersion &#8211; working with the &#8220;symbiosis between augmented reality and brands&#8221;</h3>
<p><a href="http://www.youtube.com/watch?v=I7jm-AsY0lU" target="_blank"><img class="alignnone size-medium wp-image-4457" title="dhj5mk2g_344g64g96cq_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_344g64g96cq_b-300x224.png" alt="dhj5mk2g_344g64g96cq_b" width="300" height="224" /></a></p>
<p>Total Immersion has created many of the best known and most ambitious augmented reality experiences for major brands to date, including Mattel&#8217;s <a title="new toys" href="http://www.readwriteweb.com/archives/mattels_new_web-enabled_avatar_toys_will_offer_augmented_reality.php">new AR toys</a><a title="new toys" href="http://www.readwriteweb.com/archives/mattels_new_web-enabled_avatar_toys_will_offer_augmented_reality.php"><img src="http://www.uxmatters.com/mt/archives/images/new-window-arrow.gif" alt="" width="14" height="12" /></a> to be released in conjunction with the James Cameron film Avatar, and <a id="dmas" title="AR baseball cards for Topps" href="http://www.youtube.com/watch?v=I7jm-AsY0lU">AR baseball cards for Topps</a>, <a href="http://www.youtube.com/watch?v=I7jm-AsY0lU" target="_blank">video here</a> (or click screenshot above), and the <a href="http://www.publishersweekly.com/article/CA6698612.html?industryid=47152" target="_blank">UK&#8217;s first augmented reality book</a>s.</p>
<p>Bruno founded Total Immersion 10 years ago when he was just 27. And the kind of conviction it took to survive as an augmented reality business in the decade before augmented reality captured the world&#8217;s attention is remarkable.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_343dbsph2fz_b1.png"><img class="alignnone size-medium wp-image-4456" title="dhj5mk2g_343dbsph2fz_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_343dbsph2fz_b1-300x225.png" alt="dhj5mk2g_343dbsph2fz_b" width="300" height="225" /></a></p>
<p>AR&#8217;s first steps out into the world after 17 years as predominantly a lab science may be &#8220;wobbly&#8221; (what new technology isn&#8217;t), and sometimes gloriously kitsch &#8211; check out<a id="d_eu" title="the riotous video of an AR Live Show Total Immersion produced in Korea here." href="http://www.t-immersion.com/en,video-gallery,36.html" target="_blank"> this riotous video of the 3D Interactive Live Show Total Immersion produced in Korea </a> (also see the <a href="http://augmented-reality-news.com/2009/09/15/entertainment-first-interactive-3d-live-show-now-open-in-south-korea/" target="_blank">Total Immersion Augmented Reality Blog</a> for more on the TI&#8217;s turnkey Interactive 3D Live Show Solution).</p>
<p>As Lamantia points out <a id="eo6x" title="here" href="http://www.uxmatters.com/mt/archives/2009/08/inside-out-interaction-design-for-augmented-reality.php" target="_blank">here</a>, &#8220;projecting mixed realities into public, common, or social spaces makes them social by default.&#8221;</p>
<p>However, the potential for shared location based augmented reality experiences is as yet untapped.Â  So I see the entry of the most experienced commercial augmented reality company into mobile as pretty interesting.Â Â  WhileÂ  smart phone AR still has significant limitations, and it certainly does differ from some of the futurist dreams of AR (see <a id="x3:y" title="Mok Oh's post hear on his disappointment in this regard" href="http://allthingsv.com/2009/09/03/you-know-what-really-grinds-my-gears-augmented-reality/">Mok Oh&#8217;s post here on his disappointment in this regard)</a>, it is significant that Total Immersion is committing to becoming a leader in mobile AR.</p>
<p>Our smart phones, the powerful networked sensor devices that so many people carry in their pockets, have proved themselves a &#8220;good enough for now&#8221;Â  mediating device for early manifestations of the ubiquitous computing and augmented reality base pair.Â  And now AR and ubicomp is mixed in theÂ  rich, messy soup of everyday life, commerce, business, marketing, art, entertainment, and government, we should get ready to see these technologies grow up fast, and unfold in some surprising ways that lab science didn&#8217;t necessarily predict.</p>
<p>And, perhaps, the new dialogue between scientists and entrepreneurs may spur both communities to outdo themselves.</p>
<p>Particularly, as <a href="http://programmerjoe.com/" target="_blank">Joe Ludwig</a> notes: &#8220;It seems to me that the biggest disconnect between the academics and the entrepreneurs is that they disagree on how far we are from the finish line.&#8221;</p>
<p>See the comments on Ori Inbar&#8217;s post, <a title="Augmented Reality Entrepreneurship: Natural Evolution or Intelligent Design?" rel="bookmark" href="http://gamesalfresco.com/2009/09/22/augmented-reality-entrepreneurship-natural-evolution-or-intelligent-design/">Augmented Reality Entrepreneurship: Natural Evolution or Intelligent Design?</a>, for a courteous but spirited discussion on the potential benefits and frictions of the newly expanded AR community of researchers and entrepreneurs.</p>
<p>As <a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank">Blair MacIntyre </a>(see my long conversation with Blair<a href="http://www.ugotrade.com/2009/06/12/mobile-augmented-reality-and-mirror-worlds-talking-with-blair-macintyre/" target="_blank"> here</a>) notes:</p>
<p><strong>&#8220;not all academics and researchers are only interested in the traditional models of impact. Case in point: I wouldn&#8217;t be building unpublishable games, nor investing so much time talking to the press, entrepreneurs and VCs if I did not believe strongly in the value of the impact I am having by doing that &#8212; and I know others with the same attitude.&#8221;</strong></p>
<p>In this vein, check out the Marble Game (<a href="http://www.youtube.com/watch?v=6AKgH4On65A&amp;feature=player_embedded" target="_blank">video here</a>) developed by Steve Feiner and his team at Columbia U. It&#8217;s enabled by Goblin XNA, an open source AR framework built on top of Microsoft&#8217;s XNA, which powers XBox live games, Zune games, and some Windows games. For more about Goblin XNA and AR from Columbia U <a href="http://graphics.cs.columbia.edu/projects/goblin/index.htm" target="_blank">see here</a>.Â  (Hat tip to <a href="http://www.oreillynet.com/pub/au/125" target="_blank">Brian Jepson</a> for this link)</p>
<p><a href="http://www.youtube.com/watch?v=6AKgH4On65A&amp;feature=player_embedded" target="_blank"><img class="alignnone size-medium wp-image-4528" title="Screen shot 2009-09-26 at 5.16.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-5.16.56-PM-300x182.png" alt="Screen shot 2009-09-26 at 5.16.56 PM" width="300" height="182" /></a></p>
<p>While we are still waiting for the kind of sexy AR specs &#8211; nothing totally game changing in <a href="http://gigantico.squarespace.com/336554365346/2009/9/20/eye-for-an-iphone.html" target="_blank">Gigantico&#8217;s AR eyewear roundup</a> (<a href="http://appft1.uspto.gov/netacgi/nph-Parser?Sect1=PTO1&amp;Sect2=HITOFF&amp;d=PG01&amp;p=1&amp;u=%2Fnetahtml%2FPTO%2Fsrchnum.html&amp;r=1&amp;f=G&amp;l=50&amp;s1=%2220080088937%22.PGNR.&amp;OS=DN/20080088937&amp;RS=DN/20080088937" target="_blank">maybe note this Apple patent</a>), that might get wide adoption. But at least researchers are not afraid to explore the possibilities of AR Goggles.</p>
<p>But how far are we now, with or without sexy goggles,Â  from a fuller expression of the base pair DNA of ubiquitous computing and augmented reality?</p>
<h3>We may have a LAN of things before we have an Internet of Things</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_345g9bxbwd3_b1.jpg"><img class="alignnone size-medium wp-image-4534" title="dhj5mk2g_345g9bxbwd3_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_345g9bxbwd3_b1-300x199.jpg" alt="dhj5mk2g_345g9bxbwd3_b" width="300" height="199" /></a></p>
<p><em>The picture above is a workshop I attended at <a href="http://confluxfestival.org/2009/about/" target="_blank">Conflux</a> last weekend &#8211; <a href="http://confluxfestival.org/2009/events/workshops/natalie-jeremijenko/" target="_blank">Fish &#8216;n microChips</a>, with <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko.</a> We are at the site of the <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> project (a commissioned work for <a href="http://www.sentientcity.net/exhibit/?cat=3" target="_blank">Toward the Sentient City</a>) and &#8220;a collaborative project with <a href="http://www.environmentalhealthclinic.net/environmental-health-clinic/" target="_blank">xClinic</a>, The Living and other intelligent creatures.&#8221;</em></p>
<p>We are probably as far off some grand futurist visions of ubiquitious computing as we are some of the futurist visions of augmented reality. But as it turns out that may not be a bad thing! Recently, <a href="http://twitter.com/mikekuniavsky" target="_blank">@mikekuniavsky</a> noted in a tweet:</p>
<p><span><span>&#8220;Another argument for the LAN of Things before the Internet of Things: <a rel="nofollow" href="http://tinyurl.com/lgp9uq" target="_blank">http://tinyurl.com/lgp9uq&#8221;</a></span></span></p>
<p>Bert Moore, <a href="http://www.aimglobal.org/members/news/templates/template.aspx?articleid=3553&amp;zoneid=24" target="_blank">in the article Mike linked to points out</a>, the grand vision of an &#8220;internet of things&#8221; with everything connected to everything can &#8220;distract people from thinking about the benefits of RFID in smaller, more easily implemented and cost-justified applications.&#8221; The same argument I think applies to sensor networks and augmented reality.</p>
<p>In New York City, a series of commissioned works for the <a href="http://www.archleague.org/" target="_blank">Architectural League of New York&#8217;s</a> exhibit,<em> </em><a href="http://www.sentientcity.net/exhibit/?cat=3" target="_blank">&#8220;Toward the Sentient City&#8221;</a><em> </em>are giving us the opportunity to dip our toes into the ocean of a &#8220;networked urbanism.&#8221; For only a small budget, two of the <a href="http://www.sentientcity.net/exhibit/?cat=4" target="_blank">five commissioned works</a>, <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> and <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a> demonstrate how sensor networks can allow us to explore new kinds of communities &#8211; connecting people to environments in interesting ways to create new forms of social agency.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">&#8220;Amphibious Architecture</a>&#8221; &#8211; from The Living Architecture Lab at Columbia University Graduate School of Architecture, Planning and Preservation (Directors David Benjamin and Soo-in Yang) and Natalie Jeremijenko, Environmental Health Clinic at New York University, uses a skillfully built (electronics and water are notoriously hard to mix) array of partially submerged sensors to pierce the blinding, reflective surfaces of the rivers surrounding Manhattan and to create a new two-way relationship with the ecosystem below &#8211; the water, our neighbors the fish and even a beaver that lives in the water surrounding Manhattan.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.34.56-PM.png"><img class="alignnone size-medium wp-image-4536" title="Screen shot 2009-09-26 at 6.34.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.34.56-PM-300x125.png" alt="Screen shot 2009-09-26 at 6.34.56 PM" width="300" height="125" /></a></p>
<p><em>Image from <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Toward the Sentient City</a></em></p>
<p>In a similar spirit, &#8220;<a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>&#8221; &#8211; Usman Haque, creative director, Nitipak &#8216;Dot&#8217; Samsen, designer, Ai Hasegawa, designer, Cesar Harada, designer, Barbara Jasinowicz, producer, creates a network of people and electronically assisted plants to explore what it takes to work together on energy consumption and to experience the consequences of &#8220;selfish&#8221; and &#8220;unselfish&#8221; behavior interactively before it is too late to modify our actions.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.55.29-PM.png"><img class="alignnone size-thumbnail wp-image-4537" title="Screen shot 2009-09-26 at 6.55.29 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-6.55.29-PM-150x150.png" alt="Screen shot 2009-09-26 at 6.55.29 PM" width="150" height="150" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.37.06-PM.png"><img class="alignnone size-thumbnail wp-image-4548" title="Screen shot 2009-09-26 at 9.37.06 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/Screen-shot-2009-09-26-at-9.37.06-PM-150x150.png" alt="Screen shot 2009-09-26 at 9.37.06 PM" width="150" height="150" /></a></p>
<p><em>The &#8220;Greedy Switch&#8221; from <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a> on the left. On the right &#8220;The System&#8221; &#8211; click to enlarge.</em></p>
<p>Much more to come in another post on these works, and &#8220;Toward the Sentient City.&#8221;Â  Also an update on how <a href="http://www.pachube.com/">Pachube</a> &#8211; an important part of both these projects and a very important contribution to ubiquitous computing because it creates the opportunity to connect environments and create mashups from diverse sensor data feeds &#8211; has matured since my interview with Pachube founder, Usman Haque, <a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">&#8220;Pachube, Patching the Planet,&#8221;</a> in January this year.</p>
<p>In the picture above <a href="http://www.environmentalhealthclinic.net/people/natalie-jeremijenko/" target="_blank">Natalie Jeremijenko</a>, and <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">Jonathan Laventhol</a> give the <a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">Amphibious Architecture</a> sensor array a last look over, as it will soon be lowered into the East River. Jonathan is on a busman&#8217;s holiday to help out at the pre launch of Amphibious Architecture, nr Manhattan Bridge, NYC.</p>
<p>I was very happy to getÂ  a chance to talk to <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">Jonathan Laventhol </a>- more on our conversation in another post<em>. </em>Jonathan Laventhol is <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank">CTO of Imagination &#8211; one of the world&#8217;s leading design, events, and branding agencies.</a> We talked about the importance ofÂ <a id="r_oi" title="Jonathan Laventhol, Imagination" href="http://www.laventhol.com/about" target="_blank"> Pachube</a>, which Jonathan called the &#8220;The Facebook of Data,&#8221;Â  andÂ  how the <strong>symbiosis between brands and augmented reality</strong>, and healthcare applications, wouldÂ  be key to augmented reality emerging into the mainstream.</p>
<p><em><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_340djvd2thc_b.jpg"><img class="alignnone size-medium wp-image-4453" title="dhj5mk2g_340djvd2thc_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_340djvd2thc_b-235x300.jpg" alt="dhj5mk2g_340djvd2thc_b" width="235" height="300" /></a></em></p>
<p>Natalie Jeremijenko&#8217;s workshop at Conflux on the social negotiation of technology and how <a href="http://speedbird.wordpress.com/my-book-everyware-the-dawning-age-of-ubiquitous-computing/" target="_blank">&#8220;everyware&#8221;</a> can give us the chance to experience new forms of agency and connection was totally inspiring. And I will cover this too in another post. I have so much awesome stuff to write about at the moment!</p>
<p>None of the projects in, &#8220;Toward the Sentient City,&#8221; included a mobile augmented reality, or &#8220;magic lens&#8221; component, but they all pointed to why &#8220;enchanted windows into our newly inside-out reality&#8221; are going to be so important. And why the DNA base pair of ubicomp and augmented reality can really do stuff that matters.</p>
<h3>Shangri-La &#8211; &#8220;Transfigured City&#8221;</h3>
<p><a href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/"><a href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_342g43n6w7k_b.png"><img class="alignnone size-medium wp-image-4452" title="dhj5mk2g_342g43n6w7k_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/dhj5mk2g_342g43n6w7k_b-300x249.png" alt="dhj5mk2g_342g43n6w7k_b" width="300" height="249" /></a></a></a></p>
<p>Screenshot from <a href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">Shangri-La</a> episode <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a></p>
<p>In my AR Consortium founder member interview series, I have found that, understandably, the visionary founders of these first augmented reality companies are a little reticent about sharing their full vision.Â  They are basically on stealth mode in this regard.Â  So as you will not, from my interview with <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> founder and CEO, Bruno Uzzan, get a fully drawn scenario of his vision for a next generation of shared augmented reality experiences, here&#8217;s a really interesting anime episode from the anime Shangri La called, <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a>, to mull over instead.</p>
<p>As you can tell from this rather long and circuitous intro to my my conversation with Bruno Uzzan, IÂ  have been investigating shared augmented realities pretty intensively recently. And Mike Kuniavsky pointed me to <em><em><a href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">Shangri-La</a></em></em>, and<a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank"> Transfigured City</a>, in a conversation with Mark Shepard, after Mark&#8217;s presentation at Conflux, <a href="http://confluxfestival.org/2009/events/workshops/mark-shepard/" target="_blank">Sentient City Survival Kit.</a></p>
<p><a href="http://thingm.com/about-us/team/mike-kuniavsky.html">Mike Kuniavsky</a> with <a href="http://thingm.com/about-us/team/tod-e-kurt.html">Tod E. Kurt</a> is founder of <a href="http://thingm.com/home.html" target="_blank">ThingM</a>, a ubiquitous computing device studio. Also Mike Kuniavsky researches, designs and writes about people&#8217;s experiences at the intersection of technology and everyday life &#8211; see Mike&#8217;s blog <a href="http://www.orangecone.com/" target="_blank">Orange Cone</a>. And I interviewed Mike at Etech &#8211; see<a href="../../2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/" target="_blank"> here</a>.</p>
<p>In <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">Transfigured City</a>, the &#8220;Metal Age&#8221; group has to figure out how to share and communicate in a city transfigured by augmented realities/virtualities, where no-one sees the same place in the same way.Â  Only one character can figure out from her previous experience of the city the relationship between the transfigured city and how it used to be.</p>
<p>The conversation I had with <a href="http://www.orangecone.com/" target="_blank">Mike Kuniavsky</a> on <a id="cwnc" title="The Transfigured City," href="http://www.kazeebo.com/view/17506/shangrila-episode-14-transfigured-city/" target="_blank">The Transfigured City</a> continued at a picnic in Washington Square Park the next day with Elizabeth Goodman, who I met at Etech when she gave a brilliant presentation, <a id="eag1" title="Designing for Urban Green Space" href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Designing for Urban Green Space</a>.Â  We covered so many areas at the picnic related to ubiquitous computing and augmented realities that this conversation probably deserves a post of its own (my writing to do list is growing longer!).</p>
<p><a id="on28" title="The Plot Synopsis for Shangri La" href="http://en.wikipedia.org/wiki/Shangri-La_%28novel%29" target="_blank">The Plot Synopsis for Shangri La</a>:</p>
<p><strong>&#8220;In the mid-21st century, the international committee decided to forcefully reduce CO2 emission levels to mitigate the global warming crisis. As a result, the economic market was transferred mainly into the trade of carbon. A great earthquake destroys much of Japan, yet the carbon tax placed on the country is not lifted, so Tokyo is turned into the world&#8217;s largest &#8220;jungle-polis&#8221; that absorbs carbon dioxide. Project Atlas is commenced to plan the rebuilding of Tokyo and oversee the government organization, which the Metal Age group opposes due to its oppressive nature. However, Atlas is only built with enough room for 3,500,000 people and most people are not allowed to migrate into the city. The disparity between the elite within Atlas and the refugees living in the jungles outside of its walls set up the background of the story.&#8221;</strong></p>
<p><strong></p>
<p></strong></p>
<p><a name="jumpto"><span style="font-size: medium;"><strong> Talking With Bruno Uzzan</strong></span></a></p>
<p><span style="font-size: medium;"><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/BrunoUzzanpost.jpg"><img class="alignnone size-medium wp-image-4494" title="BrunoUzzanpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/09/BrunoUzzanpost-225x300.jpg" alt="BrunoUzzanpost" width="225" height="300" /></a></p>
<p></strong></span></p>
<p><strong></p>
<p>Tish Shute:</strong> We won&#8217;t have fully opened the Pandora&#8217;s Box of Augmented Realities until we have ubiquitous, shared augmented realities, will we?</p>
<p><span id="p-xo" title="Click to view full content"> <strong>Bruno Uzzan: Yes. The most important for augmented reality is the experience we want to share. Now we are working on the cell phone, we can potentially do some marketing components that we already have developed now on cell phone. Done. Itâ€™s working.</strong></span></p>
<p><strong>But the most interesting part of it is how these new components [cell phone AR] will be used for marketing campaigns by brands. And we are also pretty much well positioned to transform some of the AR that we currently have working on Mac and PC and to transform these to applications working on mobile devices. </strong></p>
<p><strong>Tish Shute: </strong> We havenâ€™t really experienced yet what it means to actually share mobile AR experiences?</p>
<p><strong>Bruno Uzzan: Itâ€™s hard &#8212; we did a Facebook app. Itâ€™s a first try, it has a way to go.Â  But </strong><span id="c8ek" title="Click to view full content"><strong> to go more and more into social, is the way forward for us &#8211; to share and expand AR experiences. But yes, I mean what youâ€™re seeing is how two people on two different applications can share that same expanse.Â  For sure we are going in that direction. We are currently working on those kind of solutions. How people can share and experience together at the same time. Thatâ€™s how we start creating excitement in augmented reality, and itâ€™s coming up.</strong></span></p>
<p><strong>It&#8217;s a new market and thereâ€™s so much more in store for augmented reality. You know, some people are telling me, donâ€™t you believe that augmented reality is a gimmick? It will be a trend for a few weeks or a few months and then gone? I say, youâ€™re kidding me. This is only the beginning. I mean I can assure you that the applications that are on the market today are one percent of what we will have five years from now.</p>
<p></strong></p>
<p><strong>Tish Shute: </strong>I agree.</p>
<p><strong>Bruno Uzzan: And Iâ€™m sure that augmented reality will be a part of a lot of components that we are currently using today &#8211; GPS, web browser, glasses, I mean there are so many applications that will come up shortly. This is only the beginning. Iâ€™m completely convinced that augmented reality will be in three years from now what virtual reality is today, which is a billion dollar market.Â  I know that itâ€™s not just a gimmick of a few weeks or a few months, because so many brands are jumping into it, spending money, exploring solutions.Â  I know that itâ€™s not just short term -what they are willing to do and we are willing to do, but also middle and long term. And thatâ€™s what makes this adventure pretty much unique and what makes creating a cutting edge technology, very, very much exciting for us.</p>
<p></strong></p>
<p><span id="pb9s" title="Click to view full content"><strong>Tish Shute:</strong> First could you explain more to me about your partnership with Int13. I am not sure I understand what is in the arrangement from Total Immersion&#8217;s POV. I mean what happens re your own mobile software development? Haven&#8217;t you only been licensed the Int13 SDK for a limited period of time and have limited access to all its power? </span><span id="p_2y" title="Click to view full content"><a href="http://gamesalfresco.com/2009/09/15/why-int13-got-in-bed-with-total-immersion/" target="_blank">Stephane from Int13 said to Ori on Games Alfresco, here, </a>&#8220;we have licensed the SDK4 for two years,&#8221; and then Ori asks, &#8220;but you have basically kept the power to yourselves, right?&#8221; So if they are the only ones that can enhance it and develop the software, where will TI be in two years in mobile if you haven&#8217;t really had the chance to develop your own software?</span></p>
<p><span id="j5co" title="Click to view full content"></p>
<p><strong>Bruno Uzzan: Actually itâ€™s a real win-win situation. Int13 is a very small company and they have so many requests they can&#8217;t possibly fulfill them all. SoÂ  this is a way for both of us to be, as quickly as possible, the first mobile provider for all the requests we have. Also they give us exclusivity so nobody else can use INT13 SDK for such applications.Â  I think that it is a good partnership, </strong></span></p>
<p><strong>And concerning our own mobile applicationâ€¦ First of all we have currently some mobile applications working. But with Int13 we have a mobile solution that can work on many different devices. Thatâ€™s a fact and thatâ€™s working. And, believe me you will hear from us a lot more about this soon. We are fully independent on our mobile development. The reason we closed the partnership with Int 13 isÂ  to be able to deploy mobile in a broad way.</strong></p>
<p><strong> I mean you know that the difficulty with AR mobile is that each separate device needs some customization. Working on the iPhone is different from working on the Nokia, different from working on the Palm; itâ€™s different from working on the Samsung. Each of them have their own operating system inside and so we were interested in Int13&#8242;s very clever embedded solution that allows our solutions to work across many platforms.</strong></p>
<p><strong>The reason we are working with Int13 is that we are able to work on so many mobile devices, thanks to Int13. And in the mobile AR race that we are currently in, the next two years will be extremely important to usâ€¦</strong></p>
<p><span id="z_5s" title="Click to view full content"><strong>Tish Shute:</strong> OK, that definitely clarifies it a lot. So Int13 has done an embedded solution to allow TI developed AR solutions to work easily across many devices?</span></p>
<p><span id="y.wt" title="Click to view full content"><strong>Bruno Uzzan: YesÂ  they have kind of an embedded solution, a way to address extremely quickly new cell phone&#8230; But, currently on our side, we are in discussions with a mobile companyâ€¦ and that only refers to some very specific mobile devices.Â  And what they have is also a way to embed deeper our technology into mobile, so that we can have quickerâ€¦ applications that work on a large number of cell phones.</strong></span><span id="mufh" title="Click to view full content"> </span></p>
<p><strong>Tish Shute:</strong> So, basically it means you don&#8217;t have to go through some complicated negotiations with each of the cell phone companies, is what you are saying?</p>
<p><strong>Bruno Uzzan: Not only negotiations, but also hard development. You know? Working on the Windows mobile is completely different from working on the Palm OS. You know, that&#8217;s different! Its a big work, to have a mobile application working on many other devices. So, INt13,Â  provides us a way for us to save some time and some development cost too.</strong></p>
<p><strong>Tish Shute:</strong> And Int13 doesn&#8217;t have powerful AR development tools like <a href="http://www.t-immersion.com/en,interactive-kiosk,32.html" target="_blank">D&#8217;fusion</a> right?</p>
<p><strong> Bruno Uzzan: Right! That&#8217;s right. That&#8217;s why we say it&#8217;s a true win-win solution. They can benefit from our work too. And we can benefit from their work, in order to deploy quicker and faster mobile solutions. </strong></p>
<p><strong>Tish Shute:</strong> Now, the second thing isâ€¦ there is a lot of debate and disagreement about how far mobile augmented reality is from delivering something more that the &#8220;post it&#8221; approach that has been much publicized in recent months, via all the AR browser apps.</p>
<p>But from my understanding from the conversation we had earlier this summer (see below), Total Immersion is targeting a much higher level of mobile augmented reality than we&#8217;ve seen to date?</p>
<p><strong>Bruno: Yes the browser apps we have seen are a kind of augmented reality, but not exactly the way we see it. Let me explain you why. With this kind of application it&#8217;s true that you can overlay 3D-information and video. That&#8217;s a fact. So, in a sense, that&#8217;s augmented reality. But the way that they are working on the position of the 3D on that video is that they are using compass and GPS-information.. so it means that this AR solution will work only on some building and some physical objects that are FIXED. In a fixed and known position.</strong></p>
<p><strong>So you want to go to a theater?</strong></p>
<p><strong> </strong><span id="a9qv" title="Click to view full content"><strong>The theater is here, for sure it will not move, so you know the position of the theater, and thatâ€™s a fact that you can superimpose an object on the theater. Thatâ€™s what can be done currently. What we are achieving and what we are doing on mobile is more than that. We want to be able to port our solution with trading cards, with brands, into a smart phone.</strong></span></p>
<p><strong>Iâ€™m assuming that you want a can, a drink can, to be able to trigger an experience. The only way you can do it is to be able to understand what the can, it is. And the current solutions that are out there canâ€™t do that, itâ€™s impossible. </strong></p>
<p><strong>Tish Shute:</strong> Right, yes. Thereâ€™s no near-field object at all in these early browser apps.</p>
<p><strong>Bruno Uzzan: And the solution we have is that we can recognize a can and then &#8212; in a very, very precise way and that activates geo-location, so we can superimpose 3D. I mean in that case, it opens up all the applications that we currently have, so they could work on mobile.</strong></p>
<p><strong>Tish Shute:</strong> So for example, if youâ€™re working with a soft drink company, people can trigger that experience wherever they see that can?</p>
<p><strong>Bruno Uzzan: Correct. </strong></p>
<p><strong>Tish Shute:</strong> Yes. Yes, I assumed that was what youâ€™re doing</p>
<p><strong>Bruno Uzzan: We believe &#8212; and maybe thatâ€™s not the case, but we believe that our marker-less tracking technology is pretty much unique on the mobile devices.</strong></p>
<p><strong>I havenâ€™t seen yet, from anyone, a full augmented reality mobile solution working.</p>
<p></strong></p>
<p><span id="rzqr" title="Click to view full content"><strong>I really see AR being part of the Web 3.0 next generation. I mean the vision I have is that, you know &#8212; today, when you want to have information, you go on a website and then you find your information. AR &#8212; and the future is that I think it will be part of the opposite. You want to have information about a product, you just show it to your computer and the information will automatically pop up. I see here a new way to market some key messages, a new way to get information is that some physical product by themselves could be a way to get information, and you donâ€™t have to search anymore for them, itâ€™s coming out to you.</strong></span></p>
<p><strong>AR is definitely for me, one of these components. Another thing that AR is a solution, another thing that AR itself will create these kind of results in how information is being displayed. But Iâ€™m seeingÂ  here a way that could be part of a new way to have access to information. And thatâ€™s part of the vision I have. Whatever, if it is through mobile phone or web or PC, Mac, whatever, I really believe that now this kind of new generation of receiving information will come shortly and could be a kind of a new &#8212; could be part of the new 3.0 generation of the web. </strong></p>
<p><strong>Tish Shute:</strong> My friend <a id="evae" title="Gene Becker" href="http://www.genebecker.com/" target="_blank">Gene Becker</a> did <a href="http://www.genebecker.com/2009/09/thinking-about-design-strategies-for-magic-lens-ar/" target="_blank">an interesting post recently on some of the current limitations of mobile AR</a> where he pointed out the problem of:</p>
<p><em><strong>&#8220;S</strong><strong>implistic, non-standard data formats</strong> â€“ POIs, the geo-annotated data that many of these apps display, are mostly very simple one-dimensional points of lat/long coordinates, plus a few bytes of metadata. Despite their simplicity there has been no real standardization of POI formats; so far, data providers and AR app developers are only giving lip service to open interoperability. Furthermore, they are not looking ahead to future capabilities that will require more sophisticated data representations. At the same time, there is a large community of GIS, mapping and Geoweb experts who have defined open formats such asÂ <a href="http://georss.org/" target="_blank">GeoRSS</a>,Â <a href="http://geojson.org/" target="_blank">GeoJSON </a>andÂ <a href="http://code.google.com/apis/kml/documentation/" target="_blank">KML</a> that may be suitable for mobile AR use and standardization.&#8221;</p>
<p></em> <span id="gd8y" title="Click to view full content"></p>
<p><strong></p>
<p></strong></span><span id="v68s" title="Click to view full content"><strong> Bruno Uzzan: Thatâ€™s interesting. I mean &#8212; I know exactly what his is referring to. He is mainly referring to a localization and how you can have a quick, accurate localization.Â  If you look at current solutions, and you look at this 3-D superimposing on the video, the 3-D is shaking a lot. I donâ€™t know if you see that in some of these early efforts.</strong></span></p>
<p><strong>Itâ€™s hard to use because the 3-D, you know, isÂ  part of the magic of augmented reality, that is when the 3-D is being inserted in a very easy way and smooth way in your solution. Here, when you see this overlay, 2-D or 3-D overlaid on the video, itâ€™s shaking a lot. One reason for this is that the GPS compass is not accurate enough to coordinate the perfect location of the user. And here, what Gene says is interesting. I think we are addressing this localization issue in a pretty smart way.</strong></p>
<p><strong>But to be frank with you, I donâ€™t believe mobile augmented reality in the extremely short term &#8212; Iâ€™m talking about three weeks, one, two months is mature enough for good AR applications.Â  It will be shortly.Â  But for now it is more proof of concept than a true and easy application to use. </strong></p>
<p><strong>But we are starting to see a lot of new application coming out, but I really believe that marketing and entertainment are the two key markets for AR right now.</strong></p>
<p><strong>Iâ€™ve been working ten years in augmented reality. And, eight years ago, when I was talking about augmented reality, I was E.T., you know? Nobody understood what I said, and I thought it was crazy. And now, today, yes itâ€™s completely different.</strong><strong> </strong></p>
<p><strong> </strong></p>
<p><strong>Tish Shute:</strong> The Pandora&#8217;s Box of Augmented Realities, in my view, is an open, universal and standard, distributed, multiuser, augmented reality framework fully integrated with the internet and world wide web. I have been looking into Google Wave protocols as a basis for this &#8212; would you be interested in this? Do you think it is feasible?</p>
<p><span id="ngwf" title="Click to view full content"> </span><span id="vz68" title="Click to view full content"><strong> </strong></span></p>
<p><span id="vz68" title="Click to view full content"><strong>Bruno Uzzan: I think this is feasible. I think that&#8217;s doable, that&#8217;s justÂ  in my opinion. I mean some people might have another kind of opinion but I think that that&#8217;s definitely doable.</strong></span></p>
<p><strong>Tish Shute:</strong> Yes I suppose an open AR Framework involves cooperation and collaboration, it is more about business and politics than technological problems.</p>
<p><strong> Bruno Uzzan: Yes! Actually the Web is politics. Business is politics. </strong></p>
<p><span id="yeg4" title="Click to view full content"><strong>Tish Shute: </strong>I would be interested if anyone in your R&amp;D team would be interested in looking at some of the ideas that are emerging in our little discussion of Google Wave and an Open AR Framework to offer feedback. It is an interesting time now to input on the Wave Federation Protocol docs because nothing is set in stone right now.</span></p>
<p><span id="hzrf" title="Click to view full content"><strong>Bruno Uzzan: Just shoot me an email, I&#8217;ll try to put you in touch with the right person and, and a team member that can input on this.</strong></span></p>
<p><span id="hbcd" title="Click to view full content"><strong>Tish Shute: </strong>For mobile augmented reality the best thing we&#8217;ve got now is the phone, right?</span></p>
<p><strong>Bruno Uzzan: Right. </strong></p>
<p><strong>Tish Shute:</strong> And the only way we can use the phone is by holding it up, right? Isn&#8217;t this a bit of an obstacle as you introduce better object recognition and tracking? People are going to have to stop moving to use their phone. What do you feel about that experience? Isn&#8217;t AR eyewear an essential part of a tightly registered AR experience?</p>
<p><strong></p>
<p>Bruno Uzzan: </strong>We donâ€™t do hardware and we donâ€™t have the current solution for eyewear that would do all we need for a good mobile AR experience, so I guess we donâ€™t have the current answer for that.Â  But we are beginning to see the next generation of this &#8212; of these glasses.</p>
<p><strong>Tish Shute:</strong> But youâ€™re happy enough with the mobile experience of augmented reality on smart phones that youâ€™re investing in this next generation of software for this.</p>
<p><strong>Bruno Uzzan: Yes, I know. We know that some application will not work on the iPhone. And yes, whatever you do, you still need to hold the iPhone, so it means that you canâ€™t play with your hands anymore. So we know that partially, some AR solutionsÂ  we have on other platforms will lose the magical effectivities on just the iPhone.</strong></p>
<p><strong>But Iâ€™m starting to see on the market some glasses that could perhaps be not too expensive &#8212; thatâ€™s a challenge!Â  And easy to use &#8212; thatâ€™s another big challenge. And, that could fit on anybodyâ€™s faces and head &#8212; there&#8217;s another big challenge. So yes, Iâ€™m starting to see that, but so far AR glasses are only applicable for some very, very specific application, like design or theme park or, you know, some specific location where it makes sense to move forward with glasses.</p>
<p></strong></p>
<p><strong>I donâ€™t believe that kids will use glasses for &#8212; in our toys and for games in the next months or maybe othe next one or two years. But maybe something will come out shortly and that could be a big breakthrough, and enable us to think another way. ButÂ  from what we have seen so far and from what we know in this hardware market, I donâ€™t believe that currently there is a workable solution.</p>
<p><span style="font-size: small;"></p>
<p></span></strong> <span style="font-size: small;"><strong></p>
<p></strong></span><span style="font-size: medium;"><span style="font-size: small;"><strong>Note: The following section of the interview took place earlier in the Summer.</strong></span></p>
<p></span><span id="yvdi" title="Click to view full content"></p>
<p><strong>Tish Shute:</strong> You are the first commercial AR company &#8211; you started in 1999, right?</p>
<p><strong></p>
<p>Bruno Uzzan: Yes you are right. We started extremely early in this augmented reality market. We were the first company worldwide to start doing augmented reality and to start promoting augmented reality. So it&#8217;s true, we are pretty old players although the market has been getting bigger and bigger for the last year and a half. So for a long time we were the only ones in the market, and the market was not really there.</strong></span></p>
<p><strong>But for the past 8 months, the company has been growing really fast.</strong></p>
<p><strong>Tish Shute:</strong> Yes I&#8217;m sure. Congratulations for hanging in there long enough to get the pay off!</p>
<p><strong> Bruno Uzzan: You know, my background is financial. So I have been driving the company for many years in a very cash efficient way. So we have been waiting for the markets to reach maturity before starting to make some investments. So that&#8217;s the reason we are still here, and that&#8217;s the reason I think we managed pretty smartly the cash that we raised for the company.</strong></p>
<p><strong>Tish Shute:</strong> Yes there is a saying that when a market takes off you can tell the pioneers because they are the ones with the arrows in their backs. But I am glad you are dodging the arrows!</p>
<p><strong>Bruno Uzzan: You know, I&#8217;ve always driven the company with revenue. And because revenue was not there at the beginning I was extremely cautious about the cash. So now that the company is getting some revenue, for sure we are making more and more investments, and taking advantage of our situation as a worldwide leader of augmented reality.</strong></p>
<p><strong>This situation is not easy as it appears today but it&#8217;s now getting better, as you can see, AR, Augmented Reality, has very good momentum and we are benefiting a lot from all this momentum for augmented reality right now.</strong></p>
<p><strong>Tish Shute:</strong> You&#8217;ve been very involved in researching developing augmented reality tools. Are you still as active in the research area, or are you too busy keeping up with work for hire now, to be working on research and building new technology for Augmented Reality?</p>
<p><strong>Bruno Uzzan: Both. First of all, we are part of a lot of projects either directly with clients like Mattel or with some partners that are using our technology to promote and develop other AR projects. From what we have seen, many, many augmented reality projects have been done currently with our solutions.</strong></p>
<p><strong>To continue with your previous question. So we are being perceived as this leader in that space, and weÂ  have some pretty heavy demand for our services. But we are coming up with new technology, of course, still connected to Augmented Reality.Â  But, our R &amp; D is working in two different directions, which of course also bind together.</strong></p>
<p><strong>The first one is platform developments. So we want </strong><strong>Augmented Reality to work with as many platforms as possible &#8211; PC, Mac, Mobile, Game Consoles, all those are the platforms that we are targeting. We are currently doing lot of work in the R &amp; D team in cross platform compatibility</strong><strong>.</p>
<p></strong></p>
<p><strong>Tish Shute:</strong> Robert Rice said recently, &#8220;markers and webcams equal Photoshop page curls&#8230;&#8221;</p>
<p><span id="dulu" title="Click to view full content"></p>
<p><strong>Bruno Uzzan: Yes. There are so many concerns with markers. The quality is extremely bad. As soon as you hide a part of the marker, a slight part of the marker, youâ€™re dead. You canâ€™t track any more of the object. So compared to our solution where I want to say play with cards or where you are going to play with a Mattel toy, even if you hide a part of the toy, itâ€™s still working.</strong></span></p>
<p><strong> Tish Shute:</strong> But you havenâ€™t offered the public an SDK to your engine right? Basically the way people get access to your tools is working in a partnership with Total Immersion right?</p>
<p><strong>Bruno Uzzan: Correct. </strong></p>
<p><strong>Tish Shute:</strong> Do you think in the future you might open your SDK? Are you considering that?</p>
<p><strong></p>
<p>Bruno Uzzan: Yes, it would be interesting. </strong></p>
<p><strong>Tish Shute:</strong> So that is something we can see coming soon?</p>
<p><span id="short_transcription0" title="Click to view full content"><strong>Bruno Uzzan: Maybe, because itâ€™s true that Total Immersion is starting to be mature enough for these kind of tools. The only thing is that we have to respect good timing for that.Â  Itâ€™s a big decision. You know what I mean?Â  It is a big, big decision. We would then compete with others using our technology. </strong></span></p>
<p><strong>Tish Shute:</strong> Oh I know, it is a big decision when you have so much skin in the game! But it would be nice to have your SDK being THE platform for AR, wouldn&#8217;t it?</p>
<p><strong> Bruno Uzzan: It is a really big decision that we canâ€™t just take like that, you know.Â  There are a lot of friends who told me you have to be extremely careful about timing. This timing is pretty much connected to the maturity of the market. For sure, we see the market being more and more mature. But, there are a lot of low hanging fruits we still want to address</strong></p>
<p><strong>To get the best value possible for all the publicity we have and all the clients we have now. </strong></p>
<p><strong>Tish Shute:</strong> Yes, I know. Youâ€™ve been in this game so long. Now, there is an interesting question here though about tools and platforms because you know, A.R., augmented reality has already expandedÂ  beyond its kind of original purist definition. And when I talk to peopleÂ  about augmented reality, there are actually lot of different ideas and priorities of where the tools should go right now. You know, obviously we have these kind of browser-like applications, but these browser like applications are not dealing with recognizing near field objects yet.Â  What are your priorities for tool development and what are your priorities for AR development in the future? What areas are you going to focus on? Oh dear that is a rambling question!</p>
<p><strong>Bruno Uzzan: [laughter]Â  So, one of our first priorities is we need to create our software with one development, one installer, one software that can be spread on different platforms. The same application, the same software can be used either on a PC, Mac, phone or console. So thatâ€™s a lot of work, because that means that our platform has to address many many different devices and thatâ€™s a big priority for us because we received this request from our clients. We want to be able to use one application on many different platforms and devices. So, thatâ€™s the first one.</p>
<p></strong></p>
<p><strong><span id="hk3z" title="Click to view full content">And the second one is to add more and more interactivity between the real and the virtual world. So, we are working on some improvements to add some real components that will interact with virtual, and that also part of our big strategy and direction and these two worlds can more and more be bridged together, linked together so they can interactÂ  one with the other.</span></strong></p>
<p><strong>Our R&amp;D guys are working on the real world interacting more with the virtual world.Â  And I have started seeing some results which are pretty much crazy and this will be ready for next year.</p>
<p><br style="background-color: #ffff00;" /></strong><span id="b1qt" title="Click to view full content"><strong> There are so many different directions for interaction between the real world and virtual world to develop.Â  Iâ€™m sure ten years from now youâ€™re going to have AR applications everywhere.Â  Its not just temporary fashion stuff or a gimmick for few months. I mean we are getting there, its getting stronger and stronger and we are getting a good adoption rate from our consumers. They like it, they test it, they play with it and brands wants more, people want more and its getting bigger and bigger.</p>
<p></strong></p>
<p><strong>Tish Shute:</strong> Yea and I totally agree, it&#8217;s not a gimmick because the interaction between &#8220;virtual&#8221; and &#8220;real&#8221; enhances the magic of both. Another question about your R&amp;D operation. Is your R&amp;D still in France or have you moved totally out to LA?</span></p>
<p><strong>Bruno Uzzan: We are 50 people in France and I started this LA office two years ago and I moved to LA permanently. So I&#8217;m now permanently located in the US to take care of the US office, knowing that revenues are really getting bigger and bigger in the US. So it means that we are getting a lot of traction, working with large companies and now I&#8217;m currently located in the US.</strong></p>
<p><strong>Tish Shute:</strong> My sister lives in Paris. Could I visit your R&amp;D lab at some point? Iâ€™d love to visit!</p>
<p><span id="bt1e" title="Click to view full content"><strong>Bruno Uzzan: Yeah sure sure sure. I mean if you want to go. You wonâ€™t have access to all the research. But if you want to go out and meet all the team please do.</strong></span></p>
<p><strong>Tish Shute:</strong> Iâ€™d love to.</p>
<p><strong> Bruno Uzzan: No problem. Shoot me an email and I will introduce you to Eric Gehl; he is the COO of the French team. And he can definitely take care of that. </strong></p>
<p><strong>Tish Shute:</strong> That would be fun. Thank you!</p>
<p>Recently, AR browser applications have really caught the imagination of the web community, eg., Layar and Wikitude?Â  Where do you think the most important market for AR is at the moment<span id="k6fx" title="Click to view full content">, entertainment,Â  green tech, business, education?</span></p>
<p><strong>Bruno Uzzan: I think that all that you mention will be important. The first one that did grab my attention is entertainment particularly dual marketing, because they always searching for new ways to interact with players or the consumers.Â  But itâ€™s just the tip of the iceberg, you know, I mean medical applications could be huge using augmented reality. Education, and edutainment is definitely using more and more augmented reality components.Â  And I will just be submitting with big companies â€“ that are considering using augmentation for education. Museums are very important too. Also augmentation as a kind of free sales tool, you know there are so many applications, design, architecture &#8211; so many directions that itâ€™s hard to say today which one will take the lead.</strong></p>
<p><strong>But I do believe that on the short term the ones that are really really moving fast are the entertainment business and the digital marketing business. </strong></p>
<p><strong>Tish Shute:</strong> What do you think are the biggest shortcomings with current augmented reality and what are the obstacles that no one has solved yet?</p>
<p><strong>Bruno Uzzan: I think the cell phone is not fully ready for augmented reality â€“ a lot of people are working on that but there are still a lot of constraints to get the augmented reality working on a cell phone and I think that from what I heard a lot of manufacturers and a lot of companies are working from direction that are going to help us a lot to develop some great cell phone applications.</strong></p>
<p><strong>And I think thatâ€™s one of the biggest part of the game. All the applications that you see on cell phones so far are just gimmicks â€“ the next big key is how to transform some gimmick cell phone application to a real, industrial, robust application that&#8217;s going to work on a cell phone. So I think thatâ€™s a big challenge for this year. </strong></p>
<p><strong></p>
<p>Most of what we see now is just matching and overlaying some 2d components in a video. This is not what I call AR.Â  Youâ€™re far away â€“ with this kind of application, you are far away from doing the registration that we need to do â€“ you canâ€™t do it. So here&#8217;s the challenge: &#8220;how can you get a Topps is an application working on cell phone. Thatâ€™s the big challengeÂ  How we can make that work!&#8221;</strong> <strong> You can&#8217;t today get a real AR Topps application working on cell phone because there&#8217;s no cell phoneÂ  thatâ€™s actually ready. But we are working on it and the first one that can make that work, itâ€™s going to be huge.</strong></p>
<p><span id="b9-2" title="Click to view full content"><strong>When you are working with good AR components you need a lot of CPU and GPU programs. So today new cell phone have started to be more and more ready for augmented reality but you need a really good cell phone to make it work. You canâ€™t choose an old cell phone to make it work because you have some recognition, you have some tracking, you have some rendering, so you canâ€™t choose a Nokia cell phone two years old to make that work. For sure the newest iPhone is the one that can make it work, but thatâ€™s it for now. There is a lot of research â€“ from large cell phone companies â€“ to get more CPU and GPU into their cell phone.Â  But so far we are also waiting for these devices to be released to consumers.</strong></span></p>
<p><strong>Tish Shute: </strong>And the current economic climate has put a damper on MIDs hasn&#8217;t it. But who can tell? It depends what price points some new MID came out at right?</p>
<p><strong></p>
<p>Bruno Uzzan: Correct.</strong></p>
<p><strong>Tish Shute:</strong> Yes, I agree. But basically what&#8217;s interesting, the interesting thing is, the iPhone can deliver so much of what is necessary and even if Apple hasn&#8217;t given access to the full power of the iPhone to AR developers yet, there is really no going back now &#8211; the mobile augmented reality cat is out of the bag!</p>
<p><strong>Bruno Uzzan: Youâ€™re right, youâ€™re fully right. </strong></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/feed/</wfw:commentRss>
		<slash:comments>36</slash:comments>
		</item>
		<item>
		<title>Twitter and The Web of Flow: Talking with Stowe Boyd &amp; Bruce Sterling about Microsyntax, Squelettes, Favela Chic and the State of Now</title>
		<link>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/</link>
		<comments>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/#comments</comments>
		<pubDate>Sun, 28 Jun 2009 18:23:28 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[#140conf]]></category>
		<category><![CDATA[Aaron Straup Cope]]></category>
		<category><![CDATA[aesthetics of streaming]]></category>
		<category><![CDATA[asymmetric follow]]></category>
		<category><![CDATA[asynchronous web versus synchronous web]]></category>
		<category><![CDATA[being a character]]></category>
		<category><![CDATA[bottom up informatics]]></category>
		<category><![CDATA[brian solis]]></category>
		<category><![CDATA[brightkite]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Bruce Sterling on Twitter]]></category>
		<category><![CDATA[Clay Shirky]]></category>
		<category><![CDATA[CNN and Twitter]]></category>
		<category><![CDATA[cross-links keywords and networks]]></category>
		<category><![CDATA[data shadows]]></category>
		<category><![CDATA[evolution of microsyntax]]></category>
		<category><![CDATA[Favela Chic]]></category>
		<category><![CDATA[favela chic and bottom up informatics]]></category>
		<category><![CDATA[geoslashes]]></category>
		<category><![CDATA[Google and Twitter]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[googlewave]]></category>
		<category><![CDATA[Gothic High Tech]]></category>
		<category><![CDATA[hash tags]]></category>
		<category><![CDATA[hash tags on Twitter]]></category>
		<category><![CDATA[high rise favelas]]></category>
		<category><![CDATA[hybrid vigor]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[Interactions Magazine]]></category>
		<category><![CDATA[Iran and Twitter]]></category>
		<category><![CDATA[iran election and Twitter]]></category>
		<category><![CDATA[Iranian Twitters]]></category>
		<category><![CDATA[Jack Dorsey]]></category>
		<category><![CDATA[Jeff Pulver]]></category>
		<category><![CDATA[Kevin Slavin]]></category>
		<category><![CDATA[Lars and Jens Rasmussen]]></category>
		<category><![CDATA[LIFT]]></category>
		<category><![CDATA[Lift Conference 2009]]></category>
		<category><![CDATA[magic words]]></category>
		<category><![CDATA[Mark Vanderbeeken]]></category>
		<category><![CDATA[Michael Jackson and Twitter]]></category>
		<category><![CDATA[Microsyntax]]></category>
		<category><![CDATA[Microsyntax and Twitter]]></category>
		<category><![CDATA[Microsyntax.org]]></category>
		<category><![CDATA[New Depression]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[pachube google wave and microsyntax]]></category>
		<category><![CDATA[Prada Goth]]></category>
		<category><![CDATA[real time search]]></category>
		<category><![CDATA[reboot11]]></category>
		<category><![CDATA[semantic web]]></category>
		<category><![CDATA[semweb]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[SMS messages in Iran]]></category>
		<category><![CDATA[social web]]></category>
		<category><![CDATA[Squelettes]]></category>
		<category><![CDATA[Stowe Boyd]]></category>
		<category><![CDATA[streamy aesthetics of sensors]]></category>
		<category><![CDATA[stuffed animals]]></category>
		<category><![CDATA[stuffed animals and failed states]]></category>
		<category><![CDATA[stuffed animals and regulatory capture]]></category>
		<category><![CDATA[The 140 Characters Conference]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[The Now Web]]></category>
		<category><![CDATA[The State of Now]]></category>
		<category><![CDATA[The Web of Flow]]></category>
		<category><![CDATA[Things That Twitter]]></category>
		<category><![CDATA[Tim O'Reilly on Google Wave]]></category>
		<category><![CDATA[Tish Shute]]></category>
		<category><![CDATA[Tweet Deck]]></category>
		<category><![CDATA[twitter]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[webthropology]]></category>
		<category><![CDATA[Wyclef Sean and Twitter]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3835</guid>
		<description><![CDATA[I met Stowe Boyd, of Microsyntax.org at Jeff Pulverâ€™s 140 Characters Conference which convened in the middle of a perfect storm for the State of NOW (more mundanely known as the real time web) as thousands of tiny Twitter pipes became a vital conduit for the historic events occurring in Iran (picture on left, Stowe [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/stoweboyd2.jpg"><img class="alignnone size-medium wp-image-3851" title="stoweboyd2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/stoweboyd2-296x300.jpg" alt="stoweboyd2" width="296" height="300" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/BruceSterlingAtReboot.jpg"><img class="alignnone size-medium wp-image-3971" title="BruceSterlingAtReboot" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/BruceSterlingAtReboot-297x300.jpg" alt="BruceSterlingAtReboot" width="297" height="300" /></a></p>
<p>I met <a href="http://www.stoweboyd.com/" target="_blank">Stowe Boyd,</a> of <a href="http://www.microsyntax.org/" target="_blank">Microsyntax.org</a> at Jeff Pulverâ€™s <a href="http://www.140conf.com/" target="_blank">140 Characters Conference</a> which convened in the middle of a perfect storm for <a href="http://pulverblog.pulver.com/archives/008934.html" target="_blank">the State of NOW</a> (more mundanely known as the real time web) as thousands of tiny Twitter pipes became a vital conduit for the historic events occurring in Iran (picture on left, Stowe Boyd, from <a href="http://www.briansolis.com/" target="_blank">Brian Solis</a>&#8216; Flickr <a href="http://www.flickr.com/photos/briansolis/3569544825/" target="_blank">here</a>, and on the right, Bruce Sterling, presenting at <a href="http://www.reboot.dk/" target="_blank">reboot11</a> from <a title="Link to scriptingnews' photostream" rel="dc:creator cc:attributionURL" href="http://www.flickr.com/photos/scriptingnews/">scriptingnews</a>&#8216; Flickr <a href="http://www.flickr.com/photos/scriptingnews/3662894176/" target="_blank">here)</a>.</p>
<p>But, <a href="http://blog.ted.com/2009/06/qa_with_clay_sh.php" target="_blank">as Clay Shirky pointed out,</a> re Twitter and Iran:</p>
<p><strong>â€œItâ€™s incredibly messy, and the definitive rules of the game have yet to be written. So yes, weâ€™re seeing the medium invent itself in real time.â€</strong></p>
<p>Stowe Boyd is  managing director of <a href="http://www.microsyntax.org/">Microsyntax.org</a>, a non-profit investigating the embedding of structured information within microstreaming applications, particularly Twitter. It is a communitarian project so if you are interested you should get involved &#8211; see Stoweâ€™s #140conf. presentation, <a href="http://blip.tv/file/2267166" target="_blank">â€œThe evolution of Microsyntax.&#8221;</a> Stowe is an architect of &#8220;flow&#8221; and a webthropologist of the State of NOW.Â  I had the opportunity to talk with him at the conference (<a href="#StoweInterview">see the full conversation below</a>). We talked not only about some of the practicalities of implementing microsyntax but about how &#8220;the web of flow&#8221; produces a fundamental shift in how we communicate, and who we are.Â  As Stowe Boyd put it:</p>
<p><strong> â€œYou use these tools, and you are changed. And itâ€™s just a question of how long you use them and the longer you use them, the more you use them, the more changed you are. When people shift to a basis of sociality around connection with other people as opposed to mass affiliation, itâ€™s different. Itâ€™s completely different. Your whole system of ethics, the way you judge the world and decide whatâ€™s important is different. And not only different itâ€™s better. Itâ€™s a better way to deal with the world.â€</strong></p>
<p>As Wyclef Sean (@<a href="http://twitter.com/wyclef" target="_blank">wyclef</a>) remarked at #140conf, <strong>â€œTwitter just cuts the middle man in everything.â€</strong></p>
<p>At the 140 Characters Conference it was hard not to be captivated by the energy and optimism arising from the successful use of Twitter by Iranians to communicate in the aftermath of the election.Â  But the subsequent repression in Iran, in which the regime took advantage of central infrastructure controls to silence Iranian twittering (we have similar network technologies in place here in the US), leaves a big question that came to the fore after the conference:</p>
<p>While these real time applications give us the ability to leverage network effects in totally new ways, and they have enormous potential to make our lives better, do we need to give more thought to the infrastructure they rely on?</p>
<p><a href="http://pulverblog.pulver.com/archives/008957.html" target="_blank">The videos for the 140Conf</a> are up now. If you havenâ€™t already seen them, after watching Jeff Pulverâ€™s intro to <a href="http://pulverblog.pulver.com/archives/008950.html" target="_blank">The State of NOW</a> a great place to start is the <a href="http://blip.tv/file/2260001" target="_blank">â€œTwitter as a News Gathering Toolâ€</a> (Part 2).Â  Also see <a href="http://www.observer.com/2009/media/cnns-rick-sanchez-todays-ann-curry-stand-their-twitter-iran-coverage" target="_blank">Ann Curry Defends Foreign Correspondents, Twitter; Rick Sanchez Defends CNN</a> and Brian Solisâ€™ <a href="http://www.techcrunch.com/2009/06/17/is-twitter-the-cnn-of-the-new-media-generation/">post on techcrunch</a>. Christopher R. Weingarten (<a href="http://twitter.com/1000timesyes" target="_blank">@1000TimesYes</a>), <a href="http://pulverblog.pulver.com/archives/008954.html" target="_blank">â€œTwitter and the End Of Music Criticism,â€</a> and <a href="http://www.moeed.com/" target="_blank">Moeed Ahmad&#8217;s</a> (<a href="http://twitter.com/moeed" target="_blank">@moeed</a>), <a href="http://www.moeed.com/blog/2009/05/20/gaza-focus-media-140-conference-london" target="_blank">Gaza in Focus</a>, are two of several must see presentations. The #140Conf was an extraordinary event.Â  Jeff Pulver orchestrated a brilliant cast of characters and a manifestation of social media â€œhybrid vigorâ€ that was exhilarating to be part of.<span><span> </span></span></p>
<p>A &#8220;Director&#8217;s Cut&#8221; of <span><span>#140conf will be re-broadcast (Monday, June 29th and Tuesday, June 30th) at 11AM EST / 8AM PST &#8211; <a rel="nofollow" href="http://140conf.com/watchit" target="_blank">http://140conf.com/watchit</a>. </span></span>Some of the speakers will be tweeting while their session is being re-broadcast (<a href="http://pulverblog.pulver.com/archives/008960.html" target="_blank">see The Jeff Pulver Blog for more</a>).</p>
<p><strong><strong> </strong></strong><strong> </strong></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3635038955_2998f2a9e1_b.jpg"><img class="alignnone size-medium wp-image-3886" title="3635038955_2998f2a9e1_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3635038955_2998f2a9e1_b-300x200.jpg" alt="3635038955_2998f2a9e1_b" width="300" height="200" /></a></span></span></p>
<p>(picture above from <a href="http://www.briansolis.com/" target="_blank">Brian Solis&#8217;</a> Flickr<a href="http://www.flickr.com/photos/briansolis/3635038955/sizes/l/in/set-72157619870975030/" target="_blank"> here</a>)</p>
<p>In a serendipitous convergence of events I found myself in the front row taking photos <a href="http://www.flickr.com/photos/briansolis/sets/72157619870975030/" target="_blank">for Brian Solis</a> (@briansolis) see Brian&#8217;s post, <a href="http://www.briansolis.com/2009/06/is-twitter-the-cnn-of-the-new-media-generation/" target="_blank">&#8220;Is Twitter the CNN of the New Media Generation.&#8221;</a> I like <a href="http://www.flickr.com/photos/briansolis/3635866464/in/set-72157619870975030/" target="_blank">my photo of Jack Dorsey</a> (@jack) Twitter founder &#8211; the lens of my own camera would never have allowed for this one!</p>
<p>I was also sitting close to Stowe Boyd (@stoweboyd), who out of all of attendees at this jam packed event was one of the people I had most hoped to connect with.</p>
<p><span style="font-size: medium;"><strong>Talking with Bruce Sterling</strong></span><span style="font-size: medium;"><strong> about Squelettes, Twitter, Favela Chic, and Gothic High Tech<br />
</strong></span></p>
<p>I have been following the <a href="http://microsyntax.org/" target="_blank">microsyntax.org</a> effort that Stowe has been leading since <a href="http://www.wired.com/beyond_the_beyond/2009/05/spime-watch-pachube-feeds/" target="_blank">this post by Bruce Sterling  (@bruces) on Pachube Feeds</a> which contained this challenge:</p>
<p><strong>&#8220;(((Extra credit for eager ubicomp hackers: combine this [<a href="http://www.pachube.com/" target="_blank">pachube</a> feeds] with Googlewave, then describe it in microsyntax. Hello, 2015!)))&#8221;</strong></p>
<p>Stowe pointed out in our conversation at #140conf, that Microsyntax.org is in one sense a very narrow project but on the other hand it&#8217;s very broad, because every sort of information that you can imagine is going to be streaming through Twitter and related [real time] applications.</p>
<p>Or as <a href="http://www.aaronland.net/" target="_blank">Aaron Straup Cope</a> put it to me: <strong>&#8220;This is ultimately the &#8220;magic word&#8221; problem, which is essentially the semweb vs. google-is-smarter-than-you problem.&#8221;</strong></p>
<p>There are a bunch of crystal ball posts up at the moment looking into the future of the real time web&#8230; for example, <a href="http://threeminds.organic.com/2009/06/docs_are_old-school_we_need_pa.html?utm_source=twitter&amp;utm_medium=threeminds&amp;utm_campaign=praise" target="_blank">this post on threeminds.organic</a> (via @timoreilly and @<a href="http://twitter.com/buckybit" target="_blank">buckybit</a>) asking whether we need page rank for people and not just sites&#8230; and <a href="http://www.readwriteweb.com/archives/as_the_sun_sets_on_myspace_-_what_will_beat_facebo.php#more" target="_blank">this post on readwriteweb</a> that asks is the state of now the harbinger of doom to walled gardens like Facebook. And there seems to be an arms race starting around real time search.</p>
<p>But Bruce Sterling (<a href="http://twitter.com/bruces" target="_blank">@bruces</a>) in <a href="http://interactions.acm.org/content/?p=1244" target="_blank">his cover story</a> for <a href="http://interactions.acm.org/" target="_blank">Interactions Magazine</a> examines some of the blinkering on <strong style="font-weight: normal;">&#8220;t</strong>wo inherently forward looking schools of thought and action [design and science fiction].&#8221; He writes:</p>
<p><strong>&#8220;We have entered an unimagined culture. In this world of search engines and cross-links, of keywords and networks, the solid smokestacks of yesterday&#8217;s disciplines have blown out.&#8221;</strong></p>
<p>While I was writing up this post, I found myself up at the crack of doom (4 am EST) with insomnia I attribute to a tweet from <a href="http://www.experientia.com/en/who-we-are/mark-vanderbeeken/" target="_blank">Mark Vanderbeeken</a> <a href="http://twitter.com/Vanderbeeken" target="_blank">@vanderbeeken</a> which I (<a href="http://twitter.com/tishshute">@tishshute</a> ) retweeted:</p>
<p><strong>&#8220;Internet of Things &#8211; An action plan for Europe,&#8221;  (This EU Doc.  cites @<a href="http://twitter.com/agpublic" target="_blank">agpublic</a>&#8217;s Everyware) <a rel="nofollow" href="http://bit.ly/16uiu3" target="_blank">http://bit.ly/16uiu3</a> via @<a href="http://twitter.com/vanderbeeken" target="_blank">vanderbeeken</a>&#8221;</strong></p>
<p>(I wish I had used the new microsyntax in Tweetdeck RE (for more on RE <a href="http://www.stoweboyd.com/message/2009/06/a-useful-bit-of-microsyntax-re.html" target="_blank">see Stowe Boyd&#8217;s post here</a>) then I would have been able to find @vanderbeeken&#8217;s original tweet just now.)</p>
<p>So after a quick scan of the EU paper on the internet of things, and in a &#8220;here comes everybody&#8221; pre-dawn state of mind, craving oracular pronouncement, I impulsively shot an email to Bruce Sterling.</p>
<p>[<strong>Note:</strong> the following is an asynchronous exchange &#8211; not synchronous as a <a href="http://wave.google.com/">Google Wave</a> would have made possible. Also I have pulled the conversation out of the original email format. Lars and Jens Rasmussen of <a href="http://wave.google.com/">Google Wave</a> seem to have hit the nail on the head when they &#8220;set out to answer the question: What would email look like if we set out to invent it today?&#8221; (see <a href="http://radar.oreilly.com/2009/05/google-wave-what-might-email-l.html" target="_blank">this excellent post by Tim O&#8217;Reilly on Google Wave</a>)]</p>
<p><strong>Tish Shute: </strong>I shouldn&#8217;t be up at 4am EST sending you more questions but I began reading The &#8220;Internet of Things &#8211; An action plan for Europe,&#8221; <a href="http://bit.ly/16uiu3" target="_blank">http://bit.ly/16uiu3</a> before I went to sleep and woke up thinking: &#8220;How can we work on an action plan for everybody?&#8221; ((Another highlight of 140Conf. was <a href="http://www.areacodeinc.com/" target="_blank">Kevin Slavin&#8217;s talk on &#8220;Things that Twitter</a>&#8221; &#8211; &#8220;sensor aesthetics are streamy&#8221;)).</p>
<p><strong>Bruce Sterling: *Everybody? What, all <span style="font-family: arial;"><span style="font-size: small;">6,706,993,152 of us?</span></span></strong></p>
<p><strong>Tish Shute:</strong> How does, &#8220;it&#8217;s all about the data,&#8221; and &#8220;google&#8217;s smarter than you&#8221; thinking versus &#8220;bottom up&#8221;/&#8220;personal informatics&#8221;/&#8220;sem web&#8221; get worked out in the internet of things?</p>
<p><strong>Bruce Sterling:</strong> *<strong>I&#8217;d be guessing via mergers, acquisitions, lawsuits and police crackdowns, but you never know. You might have a massive financial collapse where innovations like this start coming out of slums and favelas. I heard such a great term at LIFT last week: &#8220;Favela Chic.&#8221; That&#8217;s when you are totally penniless and without commercial prospects of any kind but still wired to the gills and big on Facebook.</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3653530586_eb90ef0241_o.jpg"><img class="alignnone size-medium wp-image-3852" title="3653530586_eb90ef0241_o" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3653530586_eb90ef0241_o-300x207.jpg" alt="3653530586_eb90ef0241_o" width="300" height="207" /></a><br />
</strong></p>
<p>Photo of Bruce Sterling at Lift 2009 by <a href="http://www.flickr.com/photos/centralasian/" target="_blank">Centralasian</a></p>
<p><strong>Tish Shute:</strong> Could you elaborate on your comment:</p>
<blockquote><p><em><strong>&#8220;Also, this stuff they&#8217;re discussing: this is like all kindsa trouble ten years from now.&#8221; (from your post <a href="http://www.wired.com/beyond_the_beyond/2009/03/spime-watch-data-shadows/" target="_blank">http://www.wired.com/beyond_the_beyond/2009/03/spime-watch-data-shadows/</a>)</strong></em></p></blockquote>
<p><strong>Bruce Sterling:</strong> <strong>*Okay: you know how much trouble SMS messages are in Iran right now, even though ten years ago, cellphones were only for foreigners and rich guys in Iran? Kinda like that.</strong></p>
<p><strong>Tish Shute</strong>: <a href="http://www.wired.com/beyond_the_beyond/2009/06/ruins-of-the-present/" target="_blank">You wrote here</a>:<em> &#8220;<strong>The idea of living in *abandoned prototypes* or giant failed larval husks is very contemporary, very New Depression. Very &#8220;Favela Chic&#8230;&#8221;</strong></em></p>
<p><a href="../wp-content/uploads/2009/06/squelette-300x221.jpg" target="_blank"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/squelette-300x2211.jpg"><img class="alignnone size-full wp-image-3855" title="squelette-300x221" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/squelette-300x2211.jpg" alt="squelette-300x221" width="300" height="221" /></a></p>
<p>And:</p>
<p><em><strong>&#8220;Occasionally squatters move into &#8220;squelettes&#8221; and bring in some breeze-block, corrugated tin and plastic hoses, transforming squelettes into high-rise favelas. This doesn&#8217;t work very well because it&#8217;s tough to manage the utilities, especially the water.&#8221;</strong></em></p>
<p><strong>Tish Shute:</strong> So what happens when we rely on Google &amp; Twitter repurposed as our main means to access our government? Not only repressive regimes can cut these utilities off, even though Twitter was asked to delay maintenance so that the Iranian Twitters could keep flowing, Michael Jackson brought Twitter down.</p>
<p><strong>Bruce Sterling: *Google and Twitter aren&#8217;t going to last long enough to become main means of an access to government. Â It&#8217;s not that Google and Twitter go away and we return to a previous status quo, however. Â It&#8217;s that they are ramshackle digital expedients that get replaced by Â even more ramshackle digital expedients.</strong></p>
<p><strong>In the meantime the stuff we used to call &#8220;government&#8221; gets similarly destabilized. Â It&#8217;s been privatized, or offshored, or turned into a hollow shell.</strong></p>
<p><strong>Tish Shute:</strong> So is Twitter a squelette (like all our favorite internet platforms, including Google Wave which we haven&#8217;t even had a chance to squat yet)? And is microsyntax our breeze-block, plastic hose and corrugated tin &#8211; very Favela chic but vulnerable to the vagaries of Michael Jackson&#8217;s life and death, and deadly shut downs and snooping by repressive regimes that control the underlying utilities? (Squelettes, as Bruce Sterling points out, are: <strong><em> </em></strong><strong><em>&#8220;one of those coinages like &#8220;Prada Goth&#8221; that spring out everywhere once they are pointed out.&#8221;</em></strong><em>)</em></p>
<p><strong>Bruce Sterling: *We can draw a distinction here: Â &#8220;Gothic High Tech&#8221; is the top-end version, while &#8220;Favela Chic&#8221; is the low-end. Â &#8220;Gothic High Tech&#8221; would be the likes of a &#8220;repressive regime&#8221; which finds itself forced to conduct cruel, secret, spooky, Guantanamo cyberwars&#8230; it&#8217;s pretending to transparency, accountability and open elections, while below that surface is a weird, torchlit, Gothic hall of mirrors where invisible hands wreck banks, impoverish the civil population and kidnap people.</strong></p>
<p><strong>It&#8217;s &#8220;Gothic&#8221; because of its magnificent, elaborate appearance &#8212; very &#8220;Castle of Dracula&#8221; &#8212; but that no longer maps onto its panicky, extremist, transgressive behavior.</strong></p>
<p><strong>Gothic High Tech doesn&#8217;t live in &#8220;squelettes.&#8221; Â Gothic High Tech lives in fancier, more respectable structures called &#8220;stuffed animals.&#8221; Â A stuffed-animal used to be a functional building. From the outside it looks pretty much like it always did, maybe even &#8220;conservative.&#8221; Â Inside it&#8217;s half-retrofitted with aging, Frankenstein machineries, already outmoded, rapidly decaying.</strong></p>
<p><strong>A &#8220;stuffed animal&#8221; might, for instance, be a &#8220;savings and loan&#8221; where the behavior of the present-day inhabitants involves no actual saving and no actual loaning. Â Instead the inhabitants are on television negotiating a position in a crisis narrative and living on bailouts, while, every day, the cobwebs get a little thicker. Â &#8220;Regulatory capture&#8221; is stuffed-animal activity. Â &#8220;Failed states&#8221; and &#8220;hollow states&#8221; are stuffed animals.</strong></p>
<p><strong>&#8220;Favela Chic&#8221; is the same basic activity, but with much less money and institutional clout. Â In &#8220;Favela Chic&#8221; nobody bothers to ask for bailouts. Â They know the state has failed, or they themselves are engaged in weird activities they prefer to hide from the authorities. Â  &#8220;Favela Chic&#8221; lives within openly failed structures, or else in half-structures that are in &#8220;permanent beta&#8221; and falling down as rapidly as they can be erected. Â Favela Chic is bottom-up, open-sourced, heavily networked, subversive and piratical.</strong></p>
<p><strong>There&#8217;s a certain amount of class-transition between Gothic High Tech and Favela Chic &#8212; like, Twitter was Favela Chic and is heading straight for Gothic High Tech. Â But there&#8217;s much less transition than there used to be, because of income differentiation &#8212; the tiny faction of Gothic moguls &#8220;own&#8221; what&#8217;s left of most of the wealth, which they themselves are rapidly destroying. Â The general trend is not toward increasing global prosperity. Â The precarity is becoming general. Â The Favela beckons for everybody. Â That&#8217;s where most of the planet&#8217;s population lives already, and it&#8217;s certainly where most of the young people live. Â The idea of a &#8220;developing world&#8221; needs to be reversed; the end game is in the &#8220;developing world&#8221; and the rich nations are heading there.</strong></p>
<p><strong>Tish Shute:</strong> It seems to me that Twitter and the real time web of flow is a revolution in our means of communication presenting awesome opportunities.Â  But, are we squatters in an infrastructure that is hard to manage?</p>
<p><strong>Bruce Sterling: *Yes. I&#8217;d go farther and say that we are squatters in an infrastructure that methodically destroys previous systems of management. Â Especially itself: the closer you are to a revolutionary real-time web flow, the faster you have to reboot.</strong></p>
<p><strong>Tish Shute:</strong> And what is the answer to the question at the end of <a href="http://interactions.acm.org/content/?p=1244" target="_blank">your cover story for Interactions</a>:</p>
<p><strong><em>&#8220;The winds of the Net are full of straws. Who will make the bricks?&#8221;</em></strong></p>
<p><strong>Bruce Sterling: *I frankly have no idea. Â The storm-gusts are rising in a hurry and we are in for a whole lot of straws.</strong></p>
<p><strong>*I would point out that, if we could make up our minds about what kind of bricks we wanted, we could make them at tremendous speed. We&#8217;re not helpless: our productive capacity is frankly fantastic. Clearly we&#8217;ve lost the thread and can no longer explain what we&#8217;ve done to ourselves or how we get out of our fix. But we might surprise ourselves. 21st century Favela Chic is no mere favela, and Gothic High Tech isn&#8217;t just Gothic, it&#8217;s also very high tech. We&#8217;re in a Depression and it&#8217;s gonna last, but this is no 1930s Depression.</strong></p>
<p><strong><br />
</strong></p>
<h3><strong><a name="StoweInterview">Talking with Stowe Boyd</a></strong></h3>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3629162035_a9332a67e1_o.jpg"><img class="alignnone size-medium wp-image-3862" title="3629162035_a9332a67e1_o" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/3629162035_a9332a67e1_o-300x247.jpg" alt="3629162035_a9332a67e1_o" width="300" height="247" /></a><br />
</strong></p>
<p>Photo <a href="http://www.flickr.com/photos/stoweboyd/3629162035/" target="_blank">from Stowe Boyd&#8217;s Flickr stream,</a> &#8220;Little&#8221; Tower Of Babel, Pieter Bruegel the Younger. It is also a slide from his presentation, <a href="http://blip.tv/file/2267166" target="_blank">â€œThe evolution of Microsyntax.&#8221;</a></p>
<p><strong>[Note:</strong> Most of this conversation took part in a busy foyer at #140conf and various people joined in the conversation at different points.Â  I have cut out these other conversations and tried to maintain the thread of my own questions in the transcription.Â  But this may have resulted in a sense of choppiness and discontinuity in places.]</p>
<p><strong><strong>Tish Shute:</strong></strong> You have been on the front-line of so much web innovation, but, perhaps, you could give me a little back story on how you came to take the lead with microsyntax.org.</p>
<p><strong><strong>Stowe Boyd: </strong>Well, I&#8217;ve been on twitter 990 days or something. But long before Twitter became a commonplace household word, I&#8217;ve been advocating what I&#8217;ve been calling flow application, based on the streaming metaphor &#8211; the notion that you&#8217;d have a stream of updates coming from people that you chose to follow, which is now being called the asymmetric follow model. Years and years ago I postulated that that model was going to come along and completely change all future significant social applications. Back in the late nineties, I introduced a term &#8220;Social tools&#8221; and said social tools were going to come along and change the way the web worked. So I have a history of being 4 or 5 years ahead of what actually happens.</strong></p>
<p><strong>Microsyntax is sort of an interesting outgrowth of that. In a way it&#8217;s a very narrow area, in the sense that it&#8217;s focusing on these information patterns, the way that people want to encode information in the twitter stream or in the realtime stream of other apps. So it&#8217;s very narrow in the sense that it doesn&#8217;t immediately include all sorts of other things like these sports figures talking about how to market their services or whatever. But on the other hand it&#8217;s very broad, because every sort of information that you can imagine is going to be streaming through twitter and related applications.</strong></p>
<p><strong>We saw examples today of plants demanding water or DJ&#8217;s posting their set lists as they&#8217;re playing them, devices or equipment talking about its status, video stream from surveillance cameras. Everything you can possibly imagine will find its way in that stream. It&#8217;s all going to be encoded in different ways and grappling with that is actually an interesting problem. But more importantly it&#8217;s better for us as a community of users if we try to approach it in some systematic fashion. That&#8217;s the purpose of Microsyntax.org &#8211; this nonprofit. The concept of microsyntax is immediately evident to people who use Twitter, and that is we have a whole bunch of conventions that have emerged, and we have some places where it would be nice if conventions did emerge, but we don&#8217;t have them yet. And the idea of creating a nonprofit to do it is a sensible thing to do. So I decided I&#8217;ll go along with the request that others have made, because other people asked me to do this. So that&#8217;s a little unusual for me.</strong></p>
<p><strong>The Web of Flow<br />
</strong></p>
<p><strong>Tish: </strong>What first attracted my attention to Microsyntax.org was Bruce Sterling&#8217;s post<strong> </strong><strong>suggesting combining pachube feeds with Googlewave and then describing this in microsyntax</strong>. Why do you think Bruce Sterling posed this particular challenge?</p>
<p><strong>Stowe: Well, because he sees that everything is moving into the web of flow. Everything is moving out of the web of pages. In the next ten years we&#8217;re going to cease to experience the web as we do now, which is as a bunch of pages and we move around from link to link. And that&#8217;s what browsers are about. They help us move from page to page on the web. But Twitter, and before it the minifeed and instant messaging and a handful of other really interesting applications, have suggested a completely different web where information flows from other people to you through streaming mechanisms.</strong></p>
<p><strong> And the really interesting stuff that comes to me now on a daily basis is streaming to me through Twitter, not through my RSS reader, not me wandering around figuring out what to google, news or something. And that&#8217;s an indicator of the fact that that&#8217;s the hottest, coolest way to do it now, and means that in the future it will be &#8220;the way&#8221; that it&#8217;s done. So there will still be a web of pages out there, but it&#8217;ll exist like an archive. And we won&#8217;t experience the web that way in general because, &#8220;why would I go to the web page and see the guy&#8217;s blog post on his page, when it&#8217;s been served up to me 16 other ways?&#8221; And most importantly I&#8217;ve found it initially in some client, because somebody recommended it to me, and I resolved it in a hover window in my Twitter client. I&#8217;d never go to the page. I comment on it here&#8230;</strong><strong><br />
</strong></p>
<p><strong>Tish:</strong> I like your framing,Â  &#8220;the web of flow&#8230;&#8221;</p>
<p><strong>Stowe: Well it&#8217;s also that one of the characteristics is the tempo is different. I actually wrote a post about this, that I think it&#8217;s fundamentally important. It&#8217;s not really gotten much drift yet. I think it&#8217;s too hard for people to think this way. They just can&#8217;t get it. </strong></p>
<p><strong>The dimension that&#8217;s really most interesting is the transition from secret to private to public. The fact that Twitter is inherently public as a default is a breakthrough. I mean there&#8217;s nothing else like this. The first time that the idea, except for the blogosphere itself which is the concept it&#8217;s built on,Â  the inherent notion is that you&#8217;re publishing stuff and anyone can get access to it. But the tempo thing really matters, the fact that it&#8217;s near synchronous so your perception of what you feel like you&#8217;re doing is you feel like you&#8217;re in a stream of updates from friends. We know that. But the sensation is dramatically different than your close personal relationship with your inbox, which is email. Email is secret, closed, and the sense is the context is that it&#8217;s an inbox, like the one on your desk. And you are boxed in by that, and you&#8217;re not actually feeling like you&#8217;re dealing with people. You feel like you&#8217;re dealing with the inbox.</strong></p>
<p><strong>Tish:</strong> This was only present in boxes as you say &#8211; chat rooms, IM, IRC, MUDs, Virtual Worlds but they all had that realtime experience going on.</p>
<p><strong>Stowe: Yes instant messaging, chat rooms, etc. they were private. You had to invite people. The update paradigm on instant messaging was backwards. It said I want to follow this guy&#8217;s updates, but you had to get his permission to do it. That seemed like a sensible thing in the mid &#8217;90s when people worried about privacy and so they made it private. And private is not good, actually.</strong></p>
<p><strong>Tish: </strong>IRC is exactly like twitter but it&#8217;s off in closed worlds&#8230;</p>
<p><strong>Stowe: Yes you have to know about them. You can&#8217;t just stumble across them, you have to be invited or give the password. It&#8217;s another closed model. But instant messaging is the father of all this, or the mother, depending on which way you look at it. But that fundamental last thing, it&#8217;s based on a quote by Gabriel Garc&#237;a M&#225;rquez</strong> <strong>which is, &#8220;All people have three lives. They have a public life, a private life and a secret life.&#8221; And we are philosophically moving from a time where things were primarily secret (pre internet) to a time where things were primarily private which is web 1.0 into this new web where things are going to be primarily public and open and immediate. So we are building the scaffolding real fast to allow that to happen. And it&#8217;ll take us away from the old web. The old web will go down there. Everything&#8217;s built on dirt right? Do you see very much dirt in cities? No. No. The dirt is all concealed. It&#8217;s down there. If you want to go find it you can dig underneath the floor, and there&#8217;s dirt under there. But most people don&#8217;t spend very much time down there we send professionals down there to put plumbing and pipes underneath and we experience the world like this.</strong></p>
<p><strong>Tish: </strong>I met Eric Horvitz (Microsoft Researcher) at <a href="http://en.oreilly.com/where2009/" target="_blank">Where 2.0</a>.Â  He is interested in community sensing and ideas about how people can share data in a win win way (<a href="http://en.oreilly.com/where2009/public/schedule/speaker/49828" target="_blank">see here</a>). Do we need to work out ways to make sure people&#8217;s relationship to their data is not just to have it harvested by others for profit or repression?</p>
<p><strong>Stowe: I&#8217;m interested in this actually. I recently wrote a piece about the governance of Twitter and for the purpose of your question let&#8217;s just go along with the premise that Twitter&#8217;s going to continue to be benevolent, and everything will be open, and everything will be public and everyone can do whatever they want with it. Well there&#8217;s a tremendous amount of things that people will want to do, but most of the things that they will set about doing to begin with will turn out to be irrelevant. </strong></p>
<p><strong>People will want to measure sentiment and all this other stuff, for example. And they&#8217;ll do that and they&#8217;ll coerce a lot of big brands and so on to pay money for these services. But the thing that&#8217;s going on with the now web, my web of flow is that people are disconnecting from self identity based on mass affiliations. So ultimately the more you spend your time doing this, you don&#8217;t give a s**t about brands. Nike &#8211; I could care less. </strong></p>
<p><strong>So there is defection from the mass media. We heard it today. There&#8217;s people here who were like booing these media guys, who think they should be held up as gods because, &#8220;Oh I&#8217;m one of the first to use Twitter on TV.&#8221; Well F*** you, I don&#8217;t give a s***. I don&#8217;t watch television. Every hour that people spend on the internet is an hour they do not spend watching television. It&#8217;s a direct and one to one correlation. Sure people still want to get their fill of whatever, the NBA playoffs, but significantly less than ever before. Which is why they&#8217;re increasingly irrelevant. </strong></p>
<p><strong>So the idea that some magicians are going to come along, figure out how to mine this data to find out how I feel about my automobile? I do not have a close personal relationship with an automobile. I don&#8217;t. And increasingly people won&#8217;t affiliate that way. They won&#8217;t bond with their stuff like that. That&#8217;s why I say most of this information won&#8217;t be helpful. It&#8217;ll be interesting sociologically. Webthropologists will be able to make it interesting &#8211; and marketing people, who are trying to figure what&#8217;s going on, might be able to do the right thing. But if they&#8217;re trying to take it and make it do something for them&#8230; They&#8217;re going to try to take it and use it to change us? To control us? It&#8217;s like that line in The Labyrinth,Â  &#8220;you don&#8217;t have any power over me anymore.&#8221;</strong></p>
<p><strong>Tish: </strong>You are actually saying something much more radical than say community sensing or that we need to store our own data. You seem to be saying that in some ways it doesn&#8217;t matter whether you store your own data or your data&#8217;s in the cloud (although Iran seems to be showing how centralized network control can be a powerful tool of repression).</p>
<p><strong>Stowe: Most of the things that they&#8217;re going to try to use it to do won&#8217;t work because we&#8217;re not the same anymore. It&#8217;s inevitable. </strong><strong>You use these tools, and you are changed. And it&#8217;s just a question of how long you use them and the longer you use them, the more you use them, the more changed you are. When people shift to a basis of sociality around connection with other people as opposed to mass affiliation, it&#8217;s different. It&#8217;s completely different. Your whole system of ethics, the way you judge the world and decide what&#8217;s important, is different. And not only different it&#8217;s better. It&#8217;s a better way to deal with the world.</strong><strong> And these guys are still hoping that the old rules hold, but they don&#8217;t. They just won&#8217;t.</strong></p>
<p><strong>Tish:</strong> This isÂ  rather a broad question. But one of the things that Kevin Slavin brought up in his talk is about things that tweet &#8211; your plant is tweeting, your shoes are tweeting, your house is tweeting. Twitter is a natural medium for the internet of things and what Kevin Slavin calls the &#8220;streamy aesthetics of sensors.&#8221; But with all these things that are tweeting people have had a lot of problems with filtering that kind of flood of tweets.Â  For example, I may want to listen to a tweet from my plant telling me it needs water when I am actually at home and can do something about it. But I may not want to listen to my plant whining about being thirsty all the time. Can microsyntax help? Or is this a place for those appliances you mentioned earlier?</p>
<p><strong>Stowe:Â  There&#8217;s a whole other category of stuff having to do with priorities &#8211; this isn&#8217;t really a microsyntax &#8211; of different times of day when you&#8217;re involved in different activities. You may be more or less interested in different collections of Twitter streams. And the notion of how you go about dealing with that is &#8211; it could semi-microsyntactical, but maybe it isn&#8217;t at all. Maybe it&#8217;s all just having to do with the way that clever client apps work. So maybe if you have a super duper Tweet Deck, and you say it&#8217;s evening time and I&#8217;m in my evening mode, so a whole bunch get blocked and a different group of people, for example, your Parcheesi evening friends get enabled, and at the weekend when you have time to do house care you listen to your house.</strong></p>
<p><strong>I don&#8217;t think this is a microsyntactical issue. I don&#8217;t think this is an issue of what&#8217;s embedded in the stream except as a notion of priorities. There&#8217;s a lot of people who would like to have a mechanism to indicate priority. But I can&#8217;t think of any effective way to do it that wouldn&#8217;t immediately be abused. Of course anything can be abused. This guy thinks that this is high priority, but maybe once again it&#8217;s one of these sort of mutual dimensions where they want to indicate it&#8217;s high priority but I say I only believe in priorities from certain people.</strong></p>
<p><strong> But still there might be a case to be made for allowing people to put some kind of indication of priority in a tweet, so that there is a hope that it could rise out of the clutter. I talked about some things that I&#8217;m interested in that are just purely operational. One of these things I want to get people to build, in Tweet Deck, but it could be in any kind of a client, I want to be able to say don&#8217;t let this tweet go away. So I&#8217;m getting them to build the pushpin. So I can put a pushpin in the thing and it&#8217;ll stay at the top, or stay at the bottom, wherever I put it. And then I can respond to it later, because if I don&#8217;t respond to it right now, in most places it goes bye, and then you&#8217;ve got to go search for it &#8211; a pain in the ass. </strong></p>
<p><strong>Then I say if I&#8217;m going to have pushpins I want to have a record of all the things that I&#8217;ve push pinned &#8211; a history of pushpins. But it&#8217;s all client based. It&#8217;s got nothing to do with what&#8217;s in the text. </strong></p>
<p><strong>Tish:</strong> And knowing how many of your followers had already got a particular tweet from somewhere else which would be very useful has to be done as an appliance&#8230;</p>
<p><strong>Stowe: Yes that&#8217;s sort of a downstream metrics kind of thing.</strong></p>
<p><strong>Microsyntax is not the answer to every kind of thing. Like, appropriately dealing with hash tags in a sensible fashion is not purely a function of how we use them. But some of it is the structure itself. That&#8217;s why I came up with the subtags model. So everybody at <a href="http://en.oreilly.com/where2009/public/schedule/speaker/49828" target="_blank">South by South West</a> tagged everything southbysouthwest, so if you searched for it there were 150,000 hits a day. So it was useless. But if people had used the subtags model, or something else like that, you could have searched for the subtag. So you could have searched for south-by-southwest.parties or south-by-southwest.thirtytwo-bit which was a particular party.</strong></p>
<p><strong> And so if you have sensible tools that are doing a better job of aggregating information around more complicated ways of structuring hash information, then we can get past the fact that brute force search just isn&#8217;t going to work. It just won&#8217;t work. For example somebody going through the stuff from today all the stuff that says #140conf but they want to find just the stuff that had to do with media, they wont be able to do it. They&#8217;ll have to do it manually. So some of that is better syntax. But some of it is better tools. I mean somebody should go build a better hashtags.org. </strong></p>
<p><strong>Tish: </strong>And in terms of creating a web of flow not all of what we need can be done within the Twitter messages &#8211; it has to be done in the client and external applications<strong>&#8230;<br />
</strong></p>
<p><strong>Stowe: Yes, there&#8217;s this class of applications that listen very diligently to what you&#8217;re doing in Twitter. The primary mechanism of how you influence the app is doing stuff in Twitter. You can always go to the app and look at it and fool with it. But, if in fact, the preponderance of your interaction is, it&#8217;s listening or talking to you in Twitter &#8211; I call that an appliance, to distinguish it from these other apps. Any external application might provide you with the mechanism to dump information into Twitter, but you have to go to the app to do the primary kinds of interaction. In fact major functionality may not be available at all in Twitter or maybe no functionality, except for like <a href="http://brightkite.com/" target="_blank">Brightkite</a> allows you to dump stuff into Twitter. But the idea is that primarily you do it there. Or there&#8217;s a very limited thing like you get with Brightkite, you can send a message saying, &#8220;I&#8217;m somewhere.&#8221;</strong></p>
<p><strong>Tish: </strong>Should location be put into tags?</p>
<p><strong>Stowe: I don&#8217;t think that location should be put into tags. In other words, if I talk about Paris, then using hashtags is sensible. Or I&#8217;m talking about Sherlock Holmes and his relationship to London. It&#8217;s a conceptual thing &#8211; like talking about Heaven. It doesn&#8217;t actually have to exist on the planet somewhere. But it&#8217;s really different if you say I am in New York City right now or the more interesting case I think really is, &#8220;I am going to be in Boston colon next week&#8221; or June 15 dash 17. And I want that information to be available to everybody or a select group of my friends, or just to myself and have it find its way into my calendar. But that&#8217;s really different than saying &#8220;I&#8217;ve always enjoyed it when I visit HASH New York.&#8221; </strong></p>
<p><strong>Tish:</strong> I liked Kevin Slavin&#8217;sÂ  phrase &#8220;the streamy aesthetics of sensors.&#8221; I guess streamy aesthetics is something you have given a lot of thought to?<strong><br />
</strong></p>
<p><strong>Stowe: First of all I read a lot of poetry, so I believe in poetics in reading and writing. But I don&#8217;t think punctuation marks really degrade that dramatically. I mean it&#8217;s OK to have periods and exclamation marks and commas, and things can still be poetic. I think it&#8217;s important to try to dream up microsyntax that doesn&#8217;t take your eyes off the content, the stuff that people are really trying to say. So that&#8217;s why for example I hate L: as a location cue because anything that has letters in it, if you&#8217;re not supposed to say them, &#8211; if you&#8217;re not mentally supposed to say them, or if you&#8217;re not supposed to say them if you read it aloud, causes you to do a stutter step when you&#8217;re reading the tweet. </strong></p>
<p><strong>But if you use punctuation marks, special characters at various points or placement conventions, like where do things appear in order in a tweet, those things don&#8217;t have the same toe stub, that I think really ugly syntactic conventions would. So it&#8217;s possible to make these things pretty. For example I&#8217;m testing out trying on various conventions for what do you do with a re-tweet. If you want to re-tweet it, if you actually want to have people see it, and then you want to make your own comment. So the question is how do you separate the two? So, RT &#8211; guy&#8217;s name and then text. Well then how do you know where his text ends and my text begins. So certain things don&#8217;t work for me. I mean like a comma is not enough because there might be a comma in the text. And a period doesn&#8217;t work because there might be multiple sentences. So it has to be something else.<br />
</strong></p>
<p><strong>Tish:</strong> And aren&#8217;t there confusions that arise because there are already conventions of usage&#8230;</p>
<p><strong>Stowe: Yes, I have problems with angle brackets, for example. Sometimes when the tweets wind up in not particularly smart rendering systems, it gets confused because it thinks they&#8217;re html. For example, somebody was using the open angle bracket, and even though it&#8217;s just text, and it&#8217;s not html, when I took that tweet and put it in a blog post, it thought it was the start of an html tag, and so it disappeared. You could use an html escape character but that&#8217;s the kind of thing that causes problems. The other problem is there are other ways that it&#8217;s been used a lot. People have used this as the thing to introduce the comment that they&#8217;re making after a re-tweet.</strong></p>
<p><strong>Tish: </strong>There must be very few characters not being used for other things?<strong><br />
</strong></p>
<p><strong>Stowe: Yes but for example, when we use geoslashes there&#8217;s a blank in front of it, or it&#8217;s the first character in the tweet &#8211; so i</strong><strong>n that particular example it is similar because slash is used for other things. </strong><strong> But, in all the places where it is used, generally there&#8217;s a character that precedes it &#8211; like &#8220;w/o&#8221; for without or a fraction or a long list of these options. </strong><strong> </strong><strong>[</strong>Geoslash is microsyntax for user location using slash (&#8216;/&#8217;) &#8212; as in &#8216;just arrived /SFO&#8217; or &#8216;heading to /New York: tomorrow/&#8217; for more see <a href="http://microsyntax.pbworks.com/Geoslash" target="_blank">Stowe&#8217;s post here</a>.]</p>
<p><strong>When I was rooting around for a character I looked for a long time.Â  And also I wanted to make sure that the slash was easily reachable on cell phones, which, for example, angle bracket isn&#8217;t. So if you&#8217;re on a phone and you want to say I&#8217;m here &#8211; I don&#8217;t know how far you have to go on your phone, but it isn&#8217;t in the first eight characters of Symbian. I looked carefully to make sure it wasn&#8217;t a common character that people use widely in everyday speech like commas and semicolons and exclamation marks, but was still easily used. There are still other alternatives. It&#8217;s not the only one. There are cases to be made for all of these things &#8211; pros and cons for all of them.</strong></p>
<p><strong><br />
Anyway I was making the case of experimenting with different things for this re-tweet, &#8220;Here&#8217;s my comment.&#8221; And I was trying all sorts of stuff like double colon, I tried all kinds of things I wanted to see what it looked like. So starting this week I used the solid bar, the upright bar. It sets it off. It really feels like there&#8217;s a divide. There&#8217;s a cleavage point, and that&#8217;s that guy and this is this guy. So I&#8217;m going to write it up as one of the candidates. Some people use square brackets and many other things. There are many personal conventions but nothing has become a real convention, accepted as the norm.</strong></p>
<p><strong>[ </strong>Note: Our conversation ended here as the presentations had resumed at <a href="http://www.140conf.com/" target="_blank">140 Characters Conference</a> ]</p>
<p><strong><br />
</strong></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/06/28/twitter-and-the-web-of-flow-talking-with-stowe-boyd-bruce-sterling-about-microsyntax-squelettes-favela-chic-and-the-state-of-now/feed/</wfw:commentRss>
		<slash:comments>4</slash:comments>
		</item>
		<item>
		<title>Location Becomes Oxygen at Where 2.0 &amp; WhereCamp</title>
		<link>https://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/</link>
		<comments>https://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/#comments</comments>
		<pubDate>Tue, 02 Jun 2009 21:43:49 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[Participatory Culture]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[Aaron Straup Cope]]></category>
		<category><![CDATA[Anselm Hook]]></category>
		<category><![CDATA[bottom up urban informatics]]></category>
		<category><![CDATA[Brady Forrest]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[community sensing]]></category>
		<category><![CDATA[curating big data]]></category>
		<category><![CDATA[Dan Catt]]></category>
		<category><![CDATA[Eric Horvitz]]></category>
		<category><![CDATA[everyware]]></category>
		<category><![CDATA[FireEagle]]></category>
		<category><![CDATA[Flickr Corrections]]></category>
		<category><![CDATA[Flickr Nearby]]></category>
		<category><![CDATA[Food Genome]]></category>
		<category><![CDATA[Gene Becker]]></category>
		<category><![CDATA[geo platform]]></category>
		<category><![CDATA[geo platforms]]></category>
		<category><![CDATA[geoblogging]]></category>
		<category><![CDATA[geoplanet]]></category>
		<category><![CDATA[geotagging]]></category>
		<category><![CDATA[geowanking]]></category>
		<category><![CDATA[GigaPan]]></category>
		<category><![CDATA[gigapanning]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[googlewave]]></category>
		<category><![CDATA[headmap manifesto]]></category>
		<category><![CDATA[J.G. Ballard]]></category>
		<category><![CDATA[Jo Walsh]]></category>
		<category><![CDATA[Joshua Schachter]]></category>
		<category><![CDATA[location awareness]]></category>
		<category><![CDATA[location versus place]]></category>
		<category><![CDATA[locative media]]></category>
		<category><![CDATA[machine intelligence and human intelligence]]></category>
		<category><![CDATA[machine learning]]></category>
		<category><![CDATA[magic words and microsyntax]]></category>
		<category><![CDATA[Mapping Hacks]]></category>
		<category><![CDATA[Marc Powell]]></category>
		<category><![CDATA[Microsyntax]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[neogeography]]></category>
		<category><![CDATA[Odeo Yokai]]></category>
		<category><![CDATA[OpenGeo]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[paleogeography]]></category>
		<category><![CDATA[Papernet]]></category>
		<category><![CDATA[personal informatics]]></category>
		<category><![CDATA[Placemaker]]></category>
		<category><![CDATA[privacy and community sensing]]></category>
		<category><![CDATA[privacy and sensor networks]]></category>
		<category><![CDATA[psychogeography]]></category>
		<category><![CDATA[psychosynthography]]></category>
		<category><![CDATA[Raven Zachary]]></category>
		<category><![CDATA[real time web based visualization and mapping]]></category>
		<category><![CDATA[reality mining]]></category>
		<category><![CDATA[Rich Gibson]]></category>
		<category><![CDATA[Schuyler Erie]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[shape files]]></category>
		<category><![CDATA[shapefiles]]></category>
		<category><![CDATA[smart cities]]></category>
		<category><![CDATA[smart phones]]></category>
		<category><![CDATA[social geography]]></category>
		<category><![CDATA[social networks]]></category>
		<category><![CDATA[social reality mining]]></category>
		<category><![CDATA[Sophia Parafina]]></category>
		<category><![CDATA[Stamen Design]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[The Ubiquitous Media Studio]]></category>
		<category><![CDATA[the web in the world]]></category>
		<category><![CDATA[Tom Carden]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[ubicomp hackers]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[wearable sensory substitution devices for navigation]]></category>
		<category><![CDATA[Where2.0]]></category>
		<category><![CDATA[WhereCamp]]></category>
		<category><![CDATA[WOEID]]></category>
		<category><![CDATA[yahoo! geotechnologies group]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3567</guid>
		<description><![CDATA[curatingbigdatapost]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/anselmcircletime.jpg"><img class="alignnone size-medium wp-image-3578" title="anselmcircletime" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/anselmcircletime-300x199.jpg" alt="anselmcircletime" width="300" height="199" /></a></p>
<p>The biggest news at <a href="http://en.oreilly.com/where2009/" target="_blank">Where 2.0, 2009</a> came from the<a href="http://developer.yahoo.com/geo/" target="_blank"> Yahoo!</a><a href="http://developer.yahoo.com/geo/" target="_blank"> G</a><a href="http://developer.yahoo.com/geo/">eo Technologies Group</a>. Tyler Bell, announced Yahoo! <a href="http://developer.yahoo.com/geo/placemaker">Placemaker</a> and the opening up of the <a href="http://developer.yahoo.com/geo/geoplanet/" target="_blank">GeoPlanet</a> data set, &#8220;all of the WOEIDs [<a href="http://developer.yahoo.com/geo/">Where On Earth (WOE)</a> IDs] available as a free download under Creative Commons in June&#8221; (see <a href="http://radar.oreilly.com/brady/" target="_blank">Brady Forrest&#8217;s post</a> for more details).</p>
<p><a id="qa9y" title="WhereCamp 2009" href="http://wherecamp.pbworks.com/WhereCamp2009" target="_blank">WhereCamp 2009</a> was held immediately after <a href="http://en.oreilly.com/where2009/" target="_blank">Where 2.0</a> and was a great place to chew on the events and ideas of Where 2.0. In the picture above Anselm Hook addresses the WhereCamp morning circle in the courtyard outside the <a id="i:ij" title="Social Text" href="http://www.socialtext.com/" target="_blank">Social Text</a> offices in Palo Alto. Anselm pointed out to me:</p>
<p><strong>&#8220;there are interesting implications of placemaker in combination with other yahoo assets &#8211; in particular <a href="http://developer.yahoo.com/yql/" target="_blank">YQL</a> &#8211; placemaker by itself is neat &#8211; but placemaker combined with everything else is a natural missing piece that is a big enabler.Â  Yahoo has been impressive.&#8221;</strong></p>
<p><strong> </strong>With all the Geo platform power available to us now (also see<a href="http://radar.oreilly.com/2009/05/new-geo-for-devs-from-google-i.html" target="_blank"> New Geo for Devs from Google I/O</a>), there isn&#8217;t a shadow of a doubt in my mind Brady is right when he said, just before the Where 2009 conference: &#8220;<strong>Location is no longer a differentiator it&#8217;s going to become oxygen&#8221; </strong> <a href="http://www.webmonkey.com/blog/New_Wave_of_Apps_Build__Where__Into_the_Web" target="_blank">(quote from WebMonkey).</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/spatialjunkies1.jpg"><img class="alignnone size-medium wp-image-3612" title="spatialjunkies1" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/spatialjunkies1-300x199.jpg" alt="spatialjunkies1" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/yahoogeo41.jpg"><img class="alignnone size-medium wp-image-3614" title="yahoogeo41" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/yahoogeo41-300x199.jpg" alt="yahoogeo41" width="300" height="199" /></a></p>
<p><em>The Yahoo! GeoPlanet team at WhereCamp &#8211; Tyler Bell, (talking to Brady Forrest in picture on the left) is sporting his spatial junkies T-Shirt. Photo on right, Aaron Cope, Tyler Bell, Martin Barnes, Gary Gale.</em></p>
<p>WhereCamp was alive with key figures from the social geography movement who knew the power of these new tools (see <a href="http://www.flickr.com/photos/ugotrade/sets/72157618662411286/" target="_blank">some of my photos of WhereCamp on Flickr here</a>).</p>
<p>The importance of the Yahoo! announcement really became clear to me at <a href="http://www.socialtext.net/wherecamp/index.cgi" target="_blank">WhereCamp</a> where I attended sessions all day Saturday including the Curating Big Data Session led by <a href="http://stamen.com/studio/tom" target="_blank">Tom Carden, Stamen Design</a> and <a href="http://www.aaronstraupcope.com/" target="_blank">Aaron Straup Cope</a>, Flickr, (see Aaron&#8217;s slides from his<a href="http://en.oreilly.com/where2009/public/schedule/detail/7212" target="_blank"> Where 2.0 presentation on &#8220;The Shape of Alpha&#8221; here</a> and video <a href="http://where.blip.tv/file/2167471/" target="_blank">here</a>).</p>
<p>Anselm Hook, a prime mover for WhereCamp, is a leading philosopher of place making and veteran software developer who led <a href="http://platial.com/" target="_blank">Platial</a> engineering and is now at web consultancy <a rel="nofollow" href="http://makerlab.com/">http://makerlab.com</a><span class="bio">. If you missed Anselm at WhereCamp he will be presenting on, <a href="http://opensourcebridge.org/sessions/246" target="_blank">Ubiquitous Angels</a> at <a href="http://opensourcebridge.org/users/288" target="_blank">The OpenSource Bridge</a>, Portland, Oregon, June 17th -19th, 2009.</span></p>
<p>Anselm describes where he thinks the challenges are:</p>
<p><strong>&#8220;We should be mapping information that in some ways has been historically unmappable because it is 1) not valued or is 2) actively seen as threatening or is 3) simply too hard to map using traditional tools.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/wherecampschedul.jpg"><img class="alignnone size-medium wp-image-3680" title="wherecampschedul" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/wherecampschedul-300x199.jpg" alt="wherecampschedul" width="300" height="199" /></a></p>
<p><em>The WhereCamp Schedule</em></p>
<p><strong><span style="font-size: medium;">The Shape of Alpha</span></strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-57.png"><img class="alignnone size-medium wp-image-3647" title="picture-57" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-57-300x220.png" alt="picture-57" width="300" height="220" /></a></p>
<p><em>Screen capture from Aaron&#8217;s <a href="http://en.oreilly.com/where2009/public/schedule/detail/7212" target="_blank">Where 2.0 presentation on &#8220;The Shape of Alpha.&#8221;</a> Original photo from Flickr user <a href="http://www.flickr.com/photos/nickisconfused/3291840240/" target="_blank">&#8220;NickIsConfused&#8221;</a>.</em></p>
<p>Aaron Straup Cope&#8217;s work on <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">&#8220;The Shape of Alpha&#8221;</a> puts key questions about curating big data center stage.</p>
<p>Firstly, the exploration of what it means to curate/collaborate over meaning from &#8220;the abundance of data produced in the precise but distant language of machines&#8221; (also see <a href="http://www.archimuse.com/mw2009/abstracts/prg_335001944.html" target="_blank">The Interpretation of Bias (and the bias of interpretation)</a>). The Shape of Alpha uses a process of <a href="http://code.flickr.com/blog/2008/09/04/whos-on-first/">reverse-geocoding</a> to translate machine-generated geographic data into place names that people can understand and relate to.</p>
<p>The <a href="http://en.wikipedia.org/wiki/Shapefile" target="_blank">shapefiles</a> are built with nothing but geotagged photos and some code called clustr (written by the brilliantÂ  <a href="http://iconocla.st/cv.html" target="_blank">Schuyler Erie</a> &#8211; co-author of <a href="http://search.barnesandnoble.com/Mapping-Hacks/Schuyler-Erie/e/9780596007034" target="_blank">Mapping Hacks</a>). Anyone can make these <a href="http://en.wikipedia.org/wiki/Shapefile" target="_blank">shapefiles</a>. You can get the shapefiles out of theÂ  <a href="http://www.flickr.com/services/api">Flickr API</a>. Aaron has been keying off WOEIDs (<a href="http://developer.yahoo.com/geo/">Where On Earth (WOE)</a> IDs) but as Aaron noted you can key off anything you like &#8211; tags are an obvious choice.</p>
<p>Wow! You can reinvent mapping with this stuff.</p>
<p>Very importantly, <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">â€œThe Shape of Alpha,â€</a> tells us something about how we relate to place versus location. The emotions, disputes and behavior related to place also emerge through crowd sourced corrections.Â  For more <a href="http://www.aaronland.info/weblog/2008/07/27/invisible/#corrections" target="_blank">see this very evocative post by Aaron about corrections and treating airports as cities</a>.Â  There is a glorious thread/riff and ode to the genius ofÂ  J. G. Ballard pursued by Aaron and Dan Catt in their posts (also see Dan Catt&#8217;s, <a title="J.G. Ballard, Flickr, naked singularities and 3-letter airportÂ codes" rel="bookmark" href="http://geobloggers.com/2009/05/11/j-g-ballard-flickr-naked-singularities-and-3-letter-airports-code/">J.G. Ballard, Flickr, naked singularities and 3-letter airportÂ codes</a>, and Aaron pointed me to <a href="http://www.ballardian.com/the-real-concrete-island" target="_blank">this brilliant &#8220;geo-detective work&#8221; </a>on <a href="http://www.ballardian.com/biblio-concrete-island">Concrete Island</a>, by Mike Bonsall <a title="J.G. Ballard, Flickr, naked singularities and 3-letter airportÂ codes" rel="bookmark" href="http://geobloggers.com/2009/05/11/j-g-ballard-flickr-naked-singularities-and-3-letter-airports-code/">.</a></p>
<p>Dan Catt created <a href="http://geobloggers.com/" target="_blank">geobloggers</a> and â€œseeded the geotagging community around the Web.â€ I met Reverend Dan Catt (Twitter @revdancatt ) at Where 2.0 when he was kind enough to share part of his seat so I could join a very interesting discussion with Aaron on The Shape of Alpha.</p>
<p>As <a href="http://www.aaronland.info/weblog/2008/07/27/invisible/#corrections" target="_blank">Aaron points out</a> they decided to treat &#8220;the airport itself <em>as</em> the town&#8230;&#8221;Â  not (only) because they admired the work of <a href="http://www.jgballard.com/airports.htm">J.G. Ballard</a>,Â                      &#8220;but because it is the right thing to do.&#8221;</p>
<p>Dan Catt has excellent <a href="http://blog.flickr.net/en/2008/08/08/introducing-a-new-way-to-geotag/">blog posts</a> &#8220;describing                     the nuts and bolts of how &#8216;corrections&#8217; works.&#8221;Â  Aaron points out,Â  &#8220;in <a href="http://code.flickr.com/blog/2008/08/08/location-keeping-it-real-on-the-streets-yo/">the nerdier of                     the two</a> Dan sums it up nicely by saying&#8221;:</p>
<blockquote class="hier"><p><strong>&#8220;On a slightly more philosophical level, itâ€™s a never                         ending process. Weâ€™ll never reach a point where we can                         say â€œRight thatâ€™s in, all borders between places have                         been decided.â€ But what we should end up with are                         boundaries as defined by Flickr users.</strong></p>
<p><strong>&#8230;</strong></p>
<p><strong> </strong></p>
<p><strong>For us, itâ€™s a first small step into an experiment, and actually a pretty big                         experiment as weâ€™re potentially accepting â€œcorrectionsâ€ from our millions and                         millions of users. Weâ€™re not quite sure how itâ€™ll all turn out, but weâ€™re armed                         with Maths, Algorithms and kitten photos.&#8221;</strong></p></blockquote>
<p><strong> </strong></p>
<p><strong> </strong></p>
<h3>Psychosynthography &#8211; &#8220;Wearing Geography as a Perfume&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-59.png"><img class="alignnone size-medium wp-image-3649" title="picture-59" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-59-300x224.png" alt="picture-59" width="300" height="224" /></a><em> </em></p>
<p><em>Psychosynthography screen capture from Aaron Straup Cope&#8217;s </em><a href="http://en.oreilly.com/where2009/public/schedule/detail/7212" target="_blank">Where 2.0 presentation </a><em>. Original photo from Flickr user, <a href="http://www.flickr.com/photos/nitelynx/44189973/" target="_blank">&#8220;</a></em><a href="http://www.flickr.com/photos/nitelynx/44189973/" target="_blank">NiteLynx.&#8221;</a></p>
<p>As I mentioned before, many of the ideas raised at Where 2.0 were unpacked and worked through at WhereCamp. For example, Aaron introduced a word <strong>psychosynthography</strong> in the last 24 seconds of his talk at Where 2.0.</p>
<p>So I spent as much time as I could listening to Aaron at WhereCamp, and asking him about psychosynthography and more (post of this interview upcoming).</p>
<p>Aaron urged the Where 2.0 audience to pay attention to the Psychogeography movement seeded by <a title="Guy Debord" href="http://en.wikipedia.org/wiki/Guy_Debord">Guy Debord</a>, and<strong> &#8220;to wear geography like a perfume.&#8221;</strong></p>
<p>Joseph Hart writes in a <a href="http://www.utne.com/2004-07-01/a-new-way-of-walking.aspx" target="_blank">&#8220;New Way of Walking</a>&#8221; psychogeography is:<strong> </strong></p>
<p><strong>&#8220;a whole toy box full of playful, inventive strategies for exploring cities&#8230;just about anything that takes <span class="mw-redirect">pedestrians</span> off their predictable paths and jolts them into a new awareness of the urban landscape.&#8221;</strong></p>
<p><strong> </strong></p>
<p><strong> </strong></p>
<h3>Curating Big Data</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/tomcarden.jpg"><img class="alignnone size-medium wp-image-3625" title="tomcarden" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/tomcarden-300x199.jpg" alt="tomcarden" width="300" height="199" /></a></p>
<p><em><a href="http://stamen.com/studio/tom" target="_blank">Tom Carden, Stamen</a>, (picture above) paired with Aaron for the Curating Big Data session. Tom noted: </em></p>
<p><strong>&#8220;The Curating Big Data session for me was an attempt to learn from other attendees (as opposed to teach/lead, as with the Stamen session, &#8220;Real Time Web-Based Visualization and Mapping&#8221;).Â  Also, it was an excuse to get Aaron to recap parts of the Flickr Shapefile story for WhereCamp folks, and to get *input* on how to do more things like it. I was a bit disappointed that nobody had really good examples for us, but I was happy with Brad Stenger&#8217;s suggestion to look into the upcoming census data as a relevant area.&#8221;</strong></p>
<p>Aaronâ€™s work on the The Shape of Alpha and The Corrections project shows, as Tom noted:</p>
<p><strong>&#8220;what you can do once you have 150 million geotagged photos, and millions of users who are willing to say I took this thing here and my name for that place is &#8230;&#8221;</strong></p>
<p>And part of the significance of opening up the GeoPlanet data set is that now:</p>
<p><strong>&#8220;we can try and start talking about the same places, as far as, [for example], these shape files go. So if you are interested in what comes out of the Flickr shape files project but you also have your own opinion about what shape those places are so the IDs have to be open you have to be sure that you are talking about the same thing in the first place.&#8221;</strong></p>
<p>And, as Tom pointed out, collaborating over geo data informs us about curating any big dataset:</p>
<p><strong>&#8220;it should lead to an overarching discussion about any kind of dataset geo or otherwise and ways in which we can talk about it, and think about patterns for improving that data, for collaborating, even on things like cleanup.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/realtimewebbased-visualizationandmapping.jpg"><img class="alignnone size-medium wp-image-3681" title="realtimewebbased-visualizationandmapping" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/realtimewebbased-visualizationandmapping-300x199.jpg" alt="realtimewebbased-visualizationandmapping" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/curatingbigdatapost.jpg"><img class="alignnone size-medium wp-image-3739" title="curatingbigdatapost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/curatingbigdatapost-300x199.jpg" alt="curatingbigdatapost" width="300" height="199" /></a></p>
<p><em>Warp speed geo-genius Andrew Turner, <a href="http://www.fortiusone.com/" target="_blank">Fortius One</a><a href="http://www.fortiusone.com/" target="_blank">,</a> took these excellent notes for the &#8220;Real Time Web-Based Visualization and Mapping&#8221; (on left) and &#8220;Curating Big Data&#8221; (on the right).</em></p>
<p><em> </em></p>
<p>On my way to Where 2.0 I took the train from SFO to San Jose which was a delight but a little slower than I imagined. So, unfortunately, I arrived on Tuesday just after <a href="http://en.oreilly.com/et2009/public/schedule/speaker/3486">Michal Migurski</a> (Stamen Design), <a href="http://en.oreilly.com/et2009/public/schedule/speaker/40013">Shawn Allen</a> (Stamen Design) presented <a class="attach" href="http://assets.en.oreilly.com/1/event/20/Maps%20from%20Scratch_%20Online%20Maps%20from%20the%20Ground%20Up%20Presentation.pdf">Maps from Scratch: Online Maps from the Ground Up.</a> This was on my MUST attend list and it was a wonderful opportunity to get into, &#8220;Real Time Web-Based Visualization and Mapping.&#8221; I did get a chance to talk to Michal and Shawn a bit later in the conference but I will try to catch up with them soon for an in depth story. Below is Shawn Allen&#8217;s map of overlapping data sets from, <a href="http://www.flickr.com/photos/shazbot/3282821808/" target="_blank">&#8220;Trees, cabs and crime in San Francisco:&#8221;</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/treescrimecabs.png"><img class="alignnone size-medium wp-image-3743" title="treescrimecabs" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/treescrimecabs-300x273.png" alt="treescrimecabs" width="300" height="273" /></a></p>
<p>Another follow up I am really looking forward to making is with <a href="http://lizbarry.com/s+em/contact.htm" target="_blank">Liz Barry</a> and her work on <a href="http://lizbarry.com/s+em/about.htm" target="_blank">S+EM</a>, &#8220;an environmental mapping and social networking design project that links New York City trees with the people who care for them&#8221; (also see, <a href="http://fuf.net/" target="_blank">Creating a Greener San Francisco Tree by Tree</a>). Also I got a chance to talk to another fellow New Yorker (we have to travel to the West Coast to find time to chat!), <a href="http://radar.oreilly.com/jgeraci/" target="_blank">John Geraci</a> of <a href="http://diycity.org/" target="_blank">DIY City</a> who presented <a class="attach" href="http://assets.en.oreilly.com/1/event/25/DIY%20City_%20An%20Operating%20System%20for%20Cities%20Presentation.zip">DIY City: An Operating System for Cities.</a></p>
<h3>Machine Intelligence and Human Intelligence</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/aaronandandrew.jpg"><img class="alignnone size-medium wp-image-3622" title="aaronandandrew" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/aaronandandrew-300x199.jpg" alt="aaronandandrew" width="300" height="199" /></a></p>
<p><em>Aaron Cope, Flickr, on the left is talking to Andrew Turner on the right, the CTO of FortiusOne (see Andrew&#8217;s presentation at Where 2.0, <a href="http://blip.tv/file/2167650" target="_blank">&#8220;Your Own Private Geo Cloud&#8221;</a>)</em></p>
<p>Many of the most interesting conversations happened in between sessions at WhereCamp and Where 2.0.</p>
<p>I caught this one in which Aaron Cope and Andrew Turner were discussing some of the ideas Aaron raised in his presentation, <a href="http://www.slideshare.net/straup/capacity-planning-for-meaning-presentation-637370?type=powerpoint" target="_blank">&#8220;Capacity planning for meaning in the age of personal informatics&#8221;</a> (see Aaron&#8217;s blog post, <a href="http://www.aaronland.info/weblog/2008/10/08/tree/" target="_blank">Tree planting and tree hugging in the age of personal informatics</a>). The core question they were discussing was what happens when you wire the world at the scale people are talking about and it breaks&#8230; Aaron argues that you already have a whole class of people in systems operations that can tell us a lot about how to answer this question.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/rossmayfieldsocialtextpost.jpg"><img class="alignnone size-medium wp-image-3594" title="rossmayfieldsocialtextpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/rossmayfieldsocialtextpost-300x199.jpg" alt="rossmayfieldsocialtextpost" width="300" height="199" /></a></p>
<p><em><span class="bio">Ryan and Anselm shared the pulpit for the morning circle pulpit with <a href="http://ross.typepad.com/" target="_blank">Ross Mayfield</a> of <a href="http://www.socialtext.com/" target="_blank">Social Text </a>who was the generous host to WhereCamp.</span></em></p>
<h3>Social Reality Mining</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/benjaminbratton1.jpg"><img class="alignnone size-medium wp-image-3651" title="benjaminbratton1" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/benjaminbratton1-300x199.jpg" alt="benjaminbratton1" width="300" height="199" /></a></p>
<p><strong>&#8220;As it stands today, we have no idea what terms and limits of a cloud based citizenship of the Google Caliphate will entail and curtail. Some amalgam of post-secular cosmopolitanism, agonistic radical democracy, and post-rational actor microeconomics, largely driven by intersecting petabyte at-hand datasets and mutant strains of Abrahamic monotheism. But specifically, what is governance (let alone government) within this?&#8221; </strong><a href="http://bratton.info/" target="_blank">from Benjamin Bratton&#8217;s</a> talk at ETech 2009 (picture above)<strong>, </strong><a href="http://www.bratton.info/emergency.html" target="_blank">Undesigning the Emergency: Against Prophylactic Urban Membranes</a>.</p>
<p>The other big take away from WhereWeek &#8211; Where 2.0 and WhereCamp, was not so much news, but a confirmation of something that has been pretty clear for a while now. (Check out <a href="http://radar.oreilly.com/2008/05/the-results-of-reality-mining.html" target="_blank">Bradyâ€™s posts on reality mining at Where 2.0 last year</a>).</p>
<p>We are moving headlong into the era of reality mining with all its myriad possibilities from: &#8220;hedonistic optimization&#8221; (this term came from <a href="http://brainofstig.ai/" target="_blank">Stig Hackvan</a> when I asked him about some of the ideas central to the <a href="http://docs.google.com/tecfa.unige.ch/%7Enova/headmap-manifesto.PDF" target="_blank">HeadMap Manifesto</a> &#8211; more about HeadMap later in this post); to new forms of marketing (social reality mining the inside to predict if someone is going to trade business cards in the next 120 seconds &#8211; <a href="http://en.oreilly.com/where2009/public/schedule/speaker/46016" target="_blank">Alex &#8220;Sandy&#8221; Pentland, MIT, Where 2.0</a>); to stuff that matters to save us from mass extinction like distributed sustainability &#8211; greening production and consumption and our cities; to open government; empowering indigenous communities (also see Rebecca Moore&#8217;s<a href="http://en.oreilly.com/where2009/public/schedule/speaker/43557" target="_blank"> </a><a class="attach" href="http://assets.en.oreilly.com/1/event/25/Indigenous%20Mapping_%20Emerging%20Cultures%20on%20the%20Geoweb%20Presentation.ppt">Indigenous Mapping: Emerging Cultures on the Geoweb Presentation</a>); and not to be forgotten, the troubling possibility of new forms of social control.</p>
<h3>Smart phones are powerful networked sensor devices in the palm of our hand</h3>
<p>As Sandy Pentland MIT pointed out in his Where 2.0 keynote, <a href="http://en.oreilly.com/where2009/public/schedule/detail/7956" target="_blank">&#8220;Reality Mining for Companies, or, How Social Networks Network Best,&#8221;</a> mobile phones have created a ubiquitous instrumented reality that goes way deeper than location awareness. Smart phones are powerful networked sensor devices in the palm of our hand that know a lot more about us than location. With proximity, motion, (accelerometers), voice, images, call logs, email &#8211; what is enabled is not just knowing where people are but knowing more about them.</p>
<p>Many of the issues raised by <a href="http://speedbird.wordpress.com/" target="_blank">Adam Greenfield</a> in <a href="http://speedbird.wordpress.com/my-book-everyware-the-dawning-age-of-ubiquitous-computing/" target="_blank">Everyware</a> and in <a href="../../2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">my interview with Adam</a> were on my mind during WhereWeek, also questions that were distilled and explored in this presentation by Matt Jones last year, <a href="http://www.slideshare.net/blackbeltjones/polite-pertinent-and-pretty-designing-for-the-newwave-of-personal-informatics-493301" target="_blank">Polite, Pertinent, and&#8230; Pretty: Designing for the New-wave of Personal Informatics</a> and <a href="http://www.slideshare.net/tmo/the-web-in-the-world-presentation" target="_blank">Timo Arnall&#8217;s presentation, The Web in the World</a>.</p>
<h3>Google Wave, Pachube Feeds, Sensor Networks and Microsyntax!</h3>
<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="560" height="340" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://www.youtube.com/v/pi4MhQgGNqI&amp;hl=en&amp;fs=1" /><param name="allowfullscreen" value="true" /><embed type="application/x-shockwave-flash" width="560" height="340" src="http://www.youtube.com/v/pi4MhQgGNqI&amp;hl=en&amp;fs=1" allowscriptaccess="always" allowfullscreen="true"></embed></object></p>
<p><em><a id="o_ok" title="Visualizing 24 hours of @pachube" href="http://is.gd/IYOj" target="_blank">Visualizing 24 hours of Pachube</a> logs, feeds all around the world &#8211; built with Processing.</em></p>
<p>I found myself really wishing <a href="http://www.pachube.com/" target="_blank">Pachube</a> founder Usman Haque had been able to come to Where 2.0 this year &#8211; Usman was originally on the Where 2.0 schedule but had to drop out. My small contribution to WhereCamp was to discuss <a href="http://www.pachube.com/" target="_blank">Pachube</a>, <a href="http://www.haque.co.uk/naturalfuse.php" target="_blank">Natural Fuse</a> and <a href="http://www.shaspa.com/" target="_blank">OpenShaspa</a> in the, Urban Eco-Management session (<a href="../../2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">see my interview with Pachube Founder, Usman Haque here</a>).</p>
<p>Pachube announced &#8211; <a id="du7_" title="mapping mobile feeds in realtime" href="http://is.gd/BjJT" target="_blank">mapping mobile feeds in realtime</a>, with 3d datastream value time &amp; location based graphing just before Where 2.0.</p>
<p>And, as I was writing up this post, I was delighted to see <a href="http://www.wired.com/beyond_the_beyond/2009/05/spime-watch-pachube-feeds/" target="_blank">this post by Bruce Sterling on Pachube Feeds</a> and his challenge, offering:</p>
<p><strong>&#8220;(((Extra credit for eager ubicomp hackers: combine this [pachube feeds] with Googlewave, then describe it in microsyntax. Hello, 2015!)))&#8221;</strong></p>
<p>Also Anselm Hook, who has an extensive background in video game development, made an interesting point about Google Wave to me:</p>
<p><strong>&#8220;btw &#8211; there is a preexisting metaphor for the wave &#8211; the wave is notable in that it is making the web like a videogame &#8211; its bringing real time many participant shared interaction to the web&#8221;</strong></p>
<div id="a9iz" style="text-align: left;">And see <a href="http://radar.oreilly.com/2009/05/google-wave-what-might-email-l.html" target="_blank">Tim O&#8217;Reilly&#8217;s post</a> for more on the significance of Wave, which <a href="http://www.techcrunch.com/2009/05/28/google-wave-drips-with-ambition-can-it-fulfill-googles-grand-web-vision/">Google previewed for developers at its I/O conference</a>:</div>
<p><strong>&#8220;Jens, Lars, and team re-imagined email and instant-messaging in a connected world, a world in which messages no longer need to be sent from one place to another, but could become a conversation in the cloud. Effectively, a message (a wave) is a shared communications space with elements drawn from email, instant messaging, social networking, and even wikis.&#8221;</strong></p>
<p>For more on microsyntax see <a href="http://www.microsyntax.org/" target="_blank">microsyntax.org</a></p>
<p>Aaron pointed out to me re microsyntax:</p>
<p><strong>&#8220;This is ultimately the &#8220;magic word&#8221; problem, which is essentially the semweb vs. google-is-smarter-than-you problem.&#8221;</strong></p>
<p>I will have some more questions for Aaron on the &#8220;magic word&#8221; problem in my upcoming interview post. At the moment I am busy studying some of the thoughts in these links.</p>
<p><a href="http://delicious.com/straup/magicwords" target="_blank">http://delicious.com/straup/magicwords</a></p>
<p><a href="http://www.slideshare.net/straup/the-papernet/22" target="_blank">http://www.slideshare.net/straup/the-papernet/22</a></p>
<p><a href="http://www.xml.com/pub/a/2005/02/16/edfg.html" target="_blank">http://www.xml.com/pub/a/2005/02/16/edfg.html</a></p>
<p><a href="http://xtech06.usefulinc.com/schedule/paper/135" target="_blank">http://xtech06.usefulinc.com/schedule/paper/135</a></p>
<p><strong> </strong></p>
<p><strong> </strong></p>
<h3>Privacy: Towards a Win Win and Community Sensing</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/communitysensing.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/erichorvitz21.jpg"><img class="alignnone size-medium wp-image-3659" title="erichorvitz21" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/erichorvitz21-300x199.jpg" alt="erichorvitz21" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/communitysensing.jpg"><img class="alignnone size-medium wp-image-3655" title="communitysensing" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/communitysensing-300x199.jpg" alt="communitysensing" width="300" height="199" /></a></p>
<p>While a key element of Yahoo! Geo Technologies portfolio of platforms, <a href="http://fireeagle.yahoo.net/" target="_blank">FireEagle</a>, not only gives an important set of tools to allow people to &#8220;share their location with sites and services through the Web or a mobile device&#8221; but also offers up some vital privacy tools, the community sensing work of Eric Horvitz takes privacy and data sharing into new terrain.</p>
<p>Eric didn&#8217;t have time to discuss his privacy work in his Where 2.0 presentation, <a href="http://en.oreilly.com/where2009/public/schedule/detail/8911" target="_blank">Where, When, Why, and How: Directions in Machine Learning and Reasoning about Location</a> &#8211; it came up in his very last slide. But I ran up after his talk with my trusty old ipod recorder in hand, and got the part we missed! Fascinating stuff that will be the subject of an upcoming interview post. Here&#8217;s a little taste of what is to come. Eric describes one of the directions his team will be exploring.</p>
<p><strong>&#8220;One thing I want to do, on our research team, I&#8217;d like to develop something very simple for people to use. A challenging problem with privacy is usability and controls. Aunt Polly and Uncle Herbie just don&#8217;t get all these authentication controls and sliders, nor do they want to invest in figuring them out. They also don&#8217;t get why they&#8217;re being asked with pop up windows to yes or no to various questions and so on. One idea is having a useable privacy lens, that you can hold up anywhere and it tells you what you&#8217;re showing anybody or any organization, what does the world know about you. And you would like to have buttons to turn sharing off for some items. You&#8217;d also like to have a way to go back in time and view prior sharing and logging over periods of time, and to have buttons to push to say erase that segment of your logs.&#8221;</strong></p>
<p><strong> </strong></p>
<p>Understanding the social implications of what it means to live in an instrumented world is a topic that we cannot afford not to think about. But luckily there are a lot of people who have been thinking pretty deeply about this for a while now.</p>
<p>And I did my best at both Where 2.0 and WhereCamp to seek out as many geothinkers as I could, and do interviews wherever possible (I have not had time to mention everyone I talked to in this post but hopefully all the interviews will get on Ugotrade soon!)</p>
<p><span style="font-family: Arial,Helvetica,sans-serif; font-size: x-small;"> </span></p>
<h3>HeadMap Manifesto</h3>
<p>In the bar of The Fairmont on the last night of Where 2.0, I heard some of the history of Where 2.0, <a href="http://geowanking.org/mailman/listinfo/geowanking_geowanking.org" target="_blank">GeoWanking</a>, and <a href="http://docs.google.com/tecfa.unige.ch/%7Enova/headmap-manifesto.PDF" target="_blank">The HeadMap Manifesto</a> from Sophia Parafina, Director of Operations for <a href="http://opengeo.org/" target="_blank">OpenGeo</a> and <a href="http://testingrange.com/" target="_blank">Rich Gibson</a>, programmer, <a href="http://geowanking.org/mailman/listinfo/geowanking_geowanking.org" target="_blank">GeoWanker</a>, <a href="http://gigapan.org/index.php" target="_blank">Gigapanner</a> and co-author of <a href="http://mappinghacks.com/" target="_blank">Mapping Hacks </a>with <a href="http://iconocla.st/cv.html" target="_blank">Schuyler Erie</a> and <a href="http://frot.org/" target="_blank">Jo Walsh</a> (Jo did a lot <a href="http://frot.org/s/semantic_city.html" target="_blank">of key early work on bottom up urban informatics</a> but unfortunately couldn&#8217;t make it to WhereWeek this year).</p>
<p>Check <a id="zaq4" title="Gigapan.org" href="http://www.gigapan.org/index.php" target="_blank">Gigapan.org</a> out! <strong>&#8220;The GigaPan<span class="trademark">SM</span> process allows users to upload, share, and explore brilliant gigapixel+ panoramas from around the globe.&#8221;</strong></p>
<p>Also I interviewed Paul Ramsey, Senior Consultant, OpenGeo, so more on OpenGeo is upcoming (see Paul&#8217;s <a href="http://blog.cleverelephant.ca/2009/05/where-re-cap.html" target="_blank">Where ReCap</a>). <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43773">Justin Deoliveira</a> (OpenGeo) and <a href="http://en.oreilly.com/where2009/public/schedule/speaker/59688">Sophia Parafina</a> did a session, <a class="url uid" name="session7165" href="http://en.oreilly.com/where2009/public/schedule/detail/7165">GeoServer, GeoWebCache + OpenLayers: The OpenGeo Stack,</a><span class="url uid"> which unfortunately I missed as it </span><span class="url uid">was before I arrived Tuesday.</span><a class="url uid" name="session7165" href="http://en.oreilly.com/where2009/public/schedule/detail/7165"></a></p>
<div id="page_title"><strong> </strong></div>
<p><span class="bio"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/sophiaandrich.jpg"><img class="alignnone size-medium wp-image-3631" title="sophiaandrich" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/sophiaandrich-300x199.jpg" alt="sophiaandrich" width="300" height="199" /></a></span></p>
<p>I met Rich Gibson <a href="http://www.flickr.com/photos/ugotrade/sets/72157615022689427/" target="_blank">at Etech 2009 playing Werewolf</a> and Rich introduced me to his co-author on <a href="http://search.barnesandnoble.com/Mapping-Hacks/Schuyler-Erie/e/9780596007034" target="_blank">Mapping Hacks</a> and alpha geek supreme, Schuyler Erie, who also wrote the clustr code that The Shape of Alpha uses.</p>
<p><a href="http://joshua.schachter.org/" target="_blank">Joshua Schachter</a> founder of Delicious and the <a href="http://geowanking.org/mailman/listinfo/geowanking_geowanking.org" target="_blank">GeoWanking mailing list</a>, [and <a href="http://geourl.org/" target="_blank">GEOURL </a>- and <a href="http://memepool.com/" target="_blank">MemePool!] </a> now at Google came to WhereCamp and was mobbed by a small crowd eager to get their hands on one of the developer G Phones he was handing out from a large box.</p>
<p>GeoWanking, which is now run by O&#8217;Reilly Media, has been the incubator for all things location aware and &#8220;neogeography&#8221; discussions since 2003 &#8211; check out <a href="http://sproke.blogspot.com/2009/05/paleogeography-vs-neography.html" target="_blank">sproke</a> for a <a href="http://sproke.blogspot.com/2009/05/paleogeography-vs-neography.html">Paleogeography vs Neogeography </a>(which, as Sophia notes, was a common topic of discussion at Where 2.0) smack down in which geowanking rules in the form of a list traffic comparison.</p>
<p>Sophia and Rich shared some of their perspective on the early days of GeoWanking and the creation of the HeadMap Manifesto with me and pointed me to many other people to talk to. The prime mover of the HeadMap Manifesto, Ben Russell, has retired from the scene &#8211; perhaps bored by seeing a radical vision gone thoroughly mainstream, or exhausted by the rigors of carrying an idea through the early blue sky years, or just simply doing something else? I don&#8217;t know.</p>
<p><a href="http://docs.google.com/tecfa.unige.ch/%7Enova/headmap-manifesto.PDF" target="_blank">The HeadMap Manifesto</a> is still vibrant today even as much of what it envisaged has already been realized. HeadMap assembled the future in a poetry of fragments:</p>
<p><strong>&#8220;you can search for sadness in new york people within a mile of each other who have never met stop what they are doing and organize spontaneously to help with some task or other.&#8221;</strong></p>
<p>Anselm explained to me what powered all this social cartography revolution, from his POV, was actually IRC.</p>
<p><strong>&#8220;We had a channel on IRC called &#8220;#geo&#8221;. And many of us met there. I met Ben Russell at MathEngine in the UK. Ben and I were fascinated by the future of maps. Ben, Jo and I met Schuyler, Dav, Dan Brickley (who worked for Tim Berners-Lee, who invented the Web), Rich Gibson, Joshua Schachter (who was just a geek at Morgan Stanley at the time) &#8230; and the snowball took off&#8230; many others.</strong></p>
<p><strong>We stormed ETECH (Schuyler met Jo there). We got invited to FooCamp. Schuyler was married to Jo by Marc Powell (Food Genome) and lived at his house. We pushed so hard on the social cartography revolution.</strong></p>
<p><strong>I did a spinny globe for geourl &#8211; a project by some hacker named Joshua Schachter&#8230; we were all friends for years and we had never even met.&#8221;</strong></p>
<p><strong></strong></p>
<p><strong></strong></p>
<h3>&#8220;Can AR researchers harness these new approaches to index reality?&#8221;</h3>
<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="425" height="344" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://www.youtube.com/v/y_LXpqmdk9U&amp;hl=en&amp;fs=1" /><param name="allowfullscreen" value="true" /><embed type="application/x-shockwave-flash" width="425" height="344" src="http://www.youtube.com/v/y_LXpqmdk9U&amp;hl=en&amp;fs=1" allowscriptaccess="always" allowfullscreen="true"></embed></object></p>
<p>Radiohead&#8217;s laser (as opposed to video) clip made using <a href="http://www.velodyne.com/lidar/" target="_blank">Lidar</a></p>
<p><a id="t7u3" title="If you have read my interview with Ori Inbar," href="../../2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/" target="_blank">If you have read my interview with Ori Inbar,</a> you will know how excited I was to attend The Mobile Reality panel. <a href="http://en.oreilly.com/where2009/public/schedule/detail/7197" target="_blank">The video is up</a> and it is really awesome to hear <a href="http://en.oreilly.com/where2009/public/schedule/speaker/35457">Raven Zachary</a> (on twitter @<a href="http://www.twitter.com/ravenme">ravenme</a>) get into the fray with augmented reality.</p>
<p>The main take away for me from the Mobile Reality panel was that we shouldn&#8217;t get too hung up on the difficulties of achieving fully immersive visual augmented reality and twiddle our thumbs waiting for the long anticipated sexy lightweight eyewear &#8211; which is still in a coming soon phase (for more on immersive augmented reality see my upcoming interview with <a href="http://www.cc.gatech.edu/%7Eblair/home.html" target="_blank">Blair MacIntyre</a>). Because, in the meantime, there are plenty of delightful and useful ways to augment our experience of the world &#8211; and not all of these augmented realities rely solely on smart phones as John S. Zeleck showed in his presentation on <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43786" target="_blank">&#8220;Wearable Sensory Substitution Device for Navigation.&#8221;</a> Also I had an interesting discussion at lunch with Ori Inbar about the use of audio for augmented reality projects.</p>
<p>Where 2.0 clearly demonstrated that we have an unprecedented amount of information from mapping our world, <a href="http://gamesalfresco.com/2009/05/26/where-2-0-the-world-is-mapped-now-use-it-to-augmented-our-reality/" target="_blank">Ori Inbar noted in his conference roundup. </a> Ori writes:</p>
<p><strong>&#8220;My point is not a shocker: all we need is to tap into this information and bring it, in context, into people&#8217;s field of view.&#8221;</strong></p>
<p>As Ori noted <strong><a href="http://www.earthmine.com/" target="_blank">Earthmine</a></strong> and <strong><a href="http://www.velodyne.com/lidar/" target="_blank">Velodyne&#8217;s Lidar</a></strong> showed off two new approaches to mapping the world that have potential to create new opportunities for augmented reality:</p>
<p><strong><strong><a href="http://www.earthmine.com/" target="_blank">&#8220;Earthmine</a></strong> uses its own camera-based device to index reality, at the street level, one pixel at a time. They have just announced <a href="http://wildstylecity.com/wsc/" target="_blank">Wild Style City</a> an application that allows anyone to create virtual graffitis on top of designated public spaces. However, at this point, you can only experience it on a pc!&#8221;</strong></p>
<p><a href="http://www.velodyne.com/lidar/" target="_blank">Lidar</a>, Ori notes, has also embarked on a mission to map the outdoors. But, the question Ori highlights is:</p>
<p><strong>&#8220;Can AR researchers harness these new approaches to index reality?&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/johnzelekandbradyforrest.jpg"><img class="alignnone size-medium wp-image-3660" title="johnzelekandbradyforrest" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/johnzelekandbradyforrest-300x199.jpg" alt="johnzelekandbradyforrest" width="300" height="199" /></a></p>
<p>Brady Forrest inspects John S. Zelek&#8217;s <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43786" target="_blank">&#8220;Wearable Sensory Substitution Device for Navigation&#8221;</a> at Where Fair before putting it on and being guided by sensory nudges at the cardinal points in the belt.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/bradyforrestpost.jpg"><img class="alignnone size-medium wp-image-3661" title="bradyforrestpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/bradyforrestpost-199x300.jpg" alt="bradyforrestpost" width="199" height="300" /></a></p>
<h3>Coolest Mobile Locative Media App. at Where Fair</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/picture-61.png"><img class="alignnone size-full wp-image-3682" title="picture-61" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/picture-61.png" alt="picture-61" width="176" height="269" /></a></p>
<p><a href="http://www.sonycsl.co.jp/person/shio.html" target="_blank">Atsushi Shionozaki </a>of<strong> <a href="http://www.placeengine.com/en" target="_blank">Place Engine</a></strong> &#8211; &#8220;<strong>a core technology that enables a device equipped with Wi-Fi such as a laptop PC or smart phone to determine its current location,&#8221; </strong>demoed the coolest location aware mobile app in Where Fair &#8211; <a id="uwuf" title="Oedo Yokai" href="http://service.koozyt.com/oedo/" target="_blank">Oedo Yokai</a>. Working with ethnologist, Dr. Hiro Kubota and artist Atsushi Morioka, &#8220;Oedo Yokai&#8221; is <a id="gtb2" title="Koozyt's" href="http://www.koozyt.com/" target="_blank">Koozyt&#8217;s</a> <strong>&#8220;first attempt to cross IT (Location Information) and Folkloristics.&#8221; </strong></p>
<p><strong>&#8220;The Japanese &#8220;Yokai&#8221; are known to dwell and appear at specific locations. They can frequently be seen within the grounds of shrines and temples, believed to be the border between this world and the afterlife, or in more common places like on a hill or at a crossroads. If the &#8220;Yokai&#8221; symbolize the mystery, legend, and lore associated with places, as our interests fade from actual locations, the roles they play in modern day society will diminish, and the &#8220;Yokai&#8221; might then cease to appear at all.&#8221;</strong></p>
<p><strong></strong>I love this idea of bringing the ancient spirits of place back into our lives with our new tools of location awareness.</p>
<p>Oedo Yokai also reminds me of Aaron Straup Cope&#8217;s work on &#8220;<a href="http://www.aaronland.info/weblog/2008/07/27/invisible/#historybox" target="_blank">the idea of every spot being a &#8220;history box&#8221;</a> which he explained is &#8220;one of the threads behind<a href="http://blog.flickr.net/en/2009/02/24/an-abundant-present/" target="_blank"> the &#8216;nearby&#8217; project at Flickr</a>.&#8221;</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/oedoyokai.jpg"><img class="alignnone size-medium wp-image-3683" title="oedoyokai" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/oedoyokai-300x199.jpg" alt="oedoyokai" width="300" height="199" /></a></p>
<h3>The Food Genome</h3>
<p>I cannot end this roundup of WhereWeek without a mention of <a href="http://www.foodgenome.com/home" target="_blank">The Food Genome</a>.</p>
<p><strong>&#8220;Food Genome is a big hungry brain that scours the internet, trying to learn everything there is to know about food.&#8221;</strong></p>
<p>Watch out for the upcoming launch of this project, it stole the show with an exciting presentation at WhereCamp. You can follow <a href="http://twitter.com/foodgenome">@foodgenome on Twitter</a> now.</p>
<p>To get one of the gorgeous Food Genome brochures you had to ask Mark Powell a good question. Notice an eager hand reaching out in the picture below. I asked, &#8220;how would the basic building blocks of the food genome be licensed?&#8221; I got my brochure and a rain check on an answer to my question.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/foodgenomepost.jpg"><img class="alignnone size-medium wp-image-3664" title="foodgenomepost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/foodgenomepost-199x300.jpg" alt="foodgenomepost" width="199" height="300" /></a></p>
<h3>The Ubiquitous Media Studio</h3>
<p><strong></strong>Another highlight of WhereCamp was hearing from <a id="nfup" title="Gene Becker" href="http://lightninglaboratories.com/about.html" target="_blank">Gene Becker</a> about his new project, <a id="bs9-" title="Ubiquitous Media Studio" href="http://ubistudio.org/" target="_blank">Ubiquitous Media Studio</a> which will be located in Palo Alto. The project is still in the early stages of development but it sounds really exciting. I am looking forward to being involved from the East Coast.&#160; If you&#8217;re curious where this is going, <strong><a href="http://twitter.com/ubistudio">follow @ubistudio on Twitter</a></strong> to stay updated.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/gene.jpg"><img class="alignnone size-medium wp-image-3684" title="gene" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/06/gene-300x300.jpg" alt="gene" width="300" height="300" /></a></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/06/02/location-becomes-oxygen-at-where-20-wherecamp/feed/</wfw:commentRss>
		<slash:comments>14</slash:comments>
		</item>
		<item>
		<title>Creating the Information Landscapes of the Future: Locative Media, Loose Interaction Topologies, and The Shape of Alpha</title>
		<link>https://www.ugotrade.com/2009/05/17/creating-the-information-landscapes-of-the-future-locative-media-and-the-shape-of-alpha/</link>
		<comments>https://www.ugotrade.com/2009/05/17/creating-the-information-landscapes-of-the-future-locative-media-and-the-shape-of-alpha/#comments</comments>
		<pubDate>Sun, 17 May 2009 20:13:49 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[3D mapping for AR]]></category>
		<category><![CDATA[Aaaron Straup Cope]]></category>
		<category><![CDATA[augmented reality systems]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[body controllers]]></category>
		<category><![CDATA[community mapping]]></category>
		<category><![CDATA[Etech 2009]]></category>
		<category><![CDATA[experimental human-computer interfaces]]></category>
		<category><![CDATA[flea market mapping]]></category>
		<category><![CDATA[geotagged photos]]></category>
		<category><![CDATA[image recognition]]></category>
		<category><![CDATA[Information Landscapes]]></category>
		<category><![CDATA[information landscapes of the future]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[internet 2.0]]></category>
		<category><![CDATA[ITP Spring Show 2009]]></category>
		<category><![CDATA[jim purbrick]]></category>
		<category><![CDATA[locative media]]></category>
		<category><![CDATA[locative media manifesto]]></category>
		<category><![CDATA[loose interaction topologies]]></category>
		<category><![CDATA[Mike Kuniavsky]]></category>
		<category><![CDATA[mining geotagged photos]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[mud pong]]></category>
		<category><![CDATA[Mud Tub]]></category>
		<category><![CDATA[multi-touch surfaces]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Robert Rice]]></category>
		<category><![CDATA[S Ring]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[shapefiles]]></category>
		<category><![CDATA[smart mud]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[Where 2.0]]></category>
		<category><![CDATA[Where Week 2009]]></category>
		<category><![CDATA[WhereCamp]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3521</guid>
		<description><![CDATA[I am excited about going to Where Week 2009 &#8211; Where 2.0 and WhereCamp, this week (for more see Brady Forrest&#8217;s post).Â  Where Week will be total immersion for five days in a think tank with creators of the information landscapes of the future. As you know, if you have read my previous post &#8211; [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/looseinteractionphilosophiespost.jpg"><strong></strong></a><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/shapefiles.jpg"><img class="alignnone size-medium wp-image-3533" title="shapefiles" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/shapefiles-150x300.jpg" alt="shapefiles" width="150" height="300" /></a></strong></p>
<p>I am excited about going to <a href="http://radar.oreilly.com/2009/05/where-week-2009.html" target="_blank">Where Week</a><a href="http://radar.oreilly.com/2009/05/where-week-2009.html" target="_blank"> 2009</a> &#8211; <a href="http://en.oreilly.com/where2009/" target="_blank">Where 2.0 </a>and <a href="http://wherecamp2009.eventbrite.com/" target="_blank">WhereCamp,</a> this week (for more <a href="http://radar.oreilly.com/2009/05/where-week-2009.html" target="_blank">see Brady Forrest&#8217;s post</a>).Â  Where Week will be total immersion for five days in a think tank with creators of the information landscapes of the future.</p>
<p>As you know, if you have read <a href="http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/" target="_blank">my previous post &#8211; here</a>, I think the <a href="http://en.oreilly.com/where2009/public/schedule/detail/7197" target="_blank">&#8220;Mobile Reality</a>&#8221; panel is a must.&#160; And I have been looking forward to hearing more about <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">The Shape of Alpha</a> from <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43824" target="_blank">Aaron Straup Cope</a>, Flickr, since <a href="http://en.oreilly.com/et2009" target="_blank">Etech 2009</a> when I was introduced to Aaron by <a href="http://www.orangecone.com/" target="_blank">Mike Kuniavsky</a> (see<a href="http://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/" target="_blank"> my interview with Mike Kuniavsky at Etech here</a> and more on Mike&#8217;s concept &#8220;information shadows&#8221; <a href="http://www.orangecone.com/archives/2009/03/etech_2009_the.html">in his Etech talk</a>).</p>
<p>Shape of Alpha is revealing some fascinating possibilities for mining geotagged Flickr images.</p>
<p>As <a href="http://twitter.com/timoreilly/statuses/1777871797" target="_blank">Tim O&#8217;Reilly noted in a tweet</a>, Aaron Straup Cope&#8217;s recent post,<strong> <a href="http://code.flickr.com/blog/2009/05/06/the-absence-and-the-anchor/" target="_blank">The Absence and the Anchor, </a></strong>describes, <strong>&#8220;some of <span class="status-body"><span class="entry-content">the surprising things Flickr is learning about people from geotagged photos.&#8221;</span></span></strong> Aaron&#8217;s post also announces that the &#8220;donut hole shapes&#8221; are available for developers to use with their developer magic via the <a href="http://www.flickr.com/services/api">Flickr API</a>.</p>
<p><strong>&#8220;If the shapefiles themselves are uncharted territory, the donut holes are the fuzzy horizon even further off in the distance. We&#8217;re not really sure where this will take us but we&#8217;re pretty sure there&#8217;s something to it all so we&#8217;re eager to share it with people and see what they can make of it too.&#8221;</strong></p>
<p>For more on shape files see Aaron&#8217;s blog post about <strong>&#8220;<a href="http://code.flickr.com/blog/2009/01/12/living-in-the-donut-hole/">some experimental work that I&#8217;d been doing with the shapefile data</a> we derive from geotagged photos.&#8221;</strong></p>
<h3>Creating the Information Landscapes of the Future</h3>
<p>I have been thinking and writing a lot about augmented reality lately.Â  And key thought leaders in this space like <a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank">Blair MacIntyre</a>, <a href="http://www.curiousraven.com/" target="_blank">Robert Rice</a><strong> </strong>(<a href="http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/" target="_blank">see my interview here</a>),<strong> </strong> and<a href="http://gamesalfresco.com/about/" target="_blank"> Ori Inbar</a> (<a href="http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/" target="_blank">see my interview here</a>), have clued me in to how vital it is, for an ubiquitous experience,<strong> </strong>for us to find ways to allow people to fill in the stories that can be used for augmented reality.</p>
<p>As Ori noted in conclusion to our recent conversation:</p>
<p><strong> &#8220;in order to have a ubiquitous experience like <a href="http://www.curiousraven.com/" target="_blank">Robert Rice</a> and others are striving for, you&#8217;ll need to 3d map the world. Google earth like apps are going to help but it is not going to be sufficient. So let&#8217;s leverage people. Google became successful in part by making people work with them.&#160; Each time you create a link from your blog to my blog their search engines learn from it.&#160; So let&#8217;s find ways to make people create information that can be used for AR.&#8221;</strong></p>
<p><a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick,</a> another key thinker in this area (interview upcoming), also notes:</p>
<p><strong>&#8220;you can imagine a crowd sourced set of hints for any location so, AR knows roughly where it is and can do photosynth style matchingÂ  to find out exactly what it&#8217;s looking at and get the extra data it needs about that thing (humans are really good image recognition systems, and are also pretty good at interfacing with networks) instead of marking up real objects with ids you take pictures of real objects, tag them and then search them based on images from your ar system.&#8221;</strong></p>
<p>Ori Inbar suggested to me an idea that I really liked &#8211; the notion of bread crumbs where, <strong>&#8220;</strong><span class="ru_50CCC5_tx"><strong>You don&#8217;t have a constant view of what is happening when you walk but you get images and text and all sorts of things from people who walked there before &#8211; like breadcrumbs.</strong>&#8220;Â  And as </span><a href="http://www.designundersky.com/dus/2008/10/31/geotagged-photo-cartography.html" target="_blank">Design Under Sky</a> points out about Shape of Alpha:</p>
<p><strong>&#8220;The truly amazing part of this process is how the &#8220;community&#8221; has the authority to provide areas previously unmapped.Â Â By uploadingÂ personal photos ofÂ areas not covered by mapping software, members have theÂ power of further shrinking our world through greater visual access and understanding ofÂ locations one might not be willing or unable to visit.&#8221; </strong></p>
<p><strong><br />
</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/aaronmiketod.jpg"><img class="alignnone size-medium wp-image-3536" title="aaronmiketod" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/aaronmiketod-300x265.jpg" alt="aaronmiketod" width="300" height="265" /></a></p>
<p><em>Aaron Straup Cope, Flickr, Todd E. Kurt, <a href="http://thingm.com/" target="_blank">ThingM</a> and Mike Kuniavsky, <a href="http://thingm.com/" target="_blank">ThingM</a></em></p>
<h3>The Locative Media Manifesto</h3>
<p><a href="http://stamen.com/" target="_blank">@stamen&#8217;s</a> tweet brought Andr&#233; Lemos&#8217; brilliant, thought provoking, &#8220;<a href="http://www.andrelemos.info/2009/05/locative-media-manifesto.html" target="_blank">Locative Media Manifesto</a>,&#8221; to my attention.&#160; I am also looking forward to hearing about how old maps &#8220;can shed light on modern geography when placed in counterpoint to the state of art in modern maps from Google or Microsoft&#8221; from <a href="http://en.oreilly.com/where2009/public/schedule/speaker/3486">Michal Migurski</a>, Stamen Design, who will present <a href="http://en.oreilly.com/where2009/public/schedule/detail/7276" target="_blank">Flea Market Mapping</a> at Where 2.0.</p>
<p>Andr&#233; Lemos writes:</p>
<p><strong>&#8220;After uploading to Matrix up there &#8211; Internet 1.0 &#8211; now is the time to &#8220;download cyberspace,&#8221; information about things down here &#8211; Internet 2.0. We are not dealing with what is virtual up there, but of what to do with all this information about things and places down here! How can we relate to things and places? And now that these things and places are provided with digital information and Internet connections? Do we invoke Heidegger and Lefevbre?&#8221;</strong></p>
<p>I will leave it to people smarter than I to invoke Heidegger and Lefevbre as Andr&#233; Lemos does so eloquently in Locative Media Manifesto. But by reminding us artists and activists created the term &#8220;locative media&#8221; to &#8220;question the mass use of LBS (location based services) and LBT (location based technologies),&#8221;&#160; the manifesto delivers 30 principles to inspire creators of Locative Media and explorers of the<strong> &#8220;current dimension of cyberculture, comprising the era of &#8220;cyberspace leaking into the real world&#8221; (Russel, 1999); an era of the &#8220;internet of things.&#8221;</strong></p>
<p>I feel well primed for Where Week by my visit to the <a href="http://itp.nyu.edu/sigs/news/itp-spring-show-2009/" target="_blank">ITP Spring Show, 2009</a> last Sunday. It was an interaction riot, jam packed with brilliance and off beat explorations of locative media which I experienced through the senses of my 9 year old.&#160; His pick for best of show is below. But he had many favorites and I have <a href="http://www.flickr.com/photos/ugotrade/sets/72157618216853047/" target="_blank">put some pictures up on my Flickr stream</a> with links to the creator&#8217;s sites.&#160; One of my favorite projects Alexander Reeder&#8217;s <a href="http://artandprogram.com/sring/" target="_blank">S Ring</a> &#8211; <a href="http://tishshute.com/seducing-people-by-talking-with-your-hands" target="_blank">&#8220;seducing people by talking with your hands,&#8221; is up on my Posterous blog</a>.&#160; You can see a list of the extensive <a href="http://itp.nyu.edu/sigs/news/itp-spring-show-2009/" target="_blank">media coverage the show got here</a>.</p>
<h3>Loose Interaction Topologies</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/mudpongpost.jpg"><img class="alignnone size-medium wp-image-3528" title="mudpongpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/mudpongpost-300x199.jpg" alt="mudpongpost" width="300" height="199" /></a></p>
<p>The picture above is of a game of mud pongÂ  in <a href="http://dirtycomputing.com/" target="_blank">Tom Gerhardt&#8217;s Mud Tub</a>.Â  The mud interface &#8211; &#8220;a smart tub with some mud&#8221; knows the topology of the mud and where your hand is. Mud Tub takes advantage ofÂ  a complex material &#8211; to explore loose interaction topologies, including as seen above a game of Mud Pong.Â  Loose interaction topologies are a way we can explore meaning in &#8220;the internet of things.&#8221;</p>
<p>Tom explained his own exploration of the internet of things to me very succinctly:</p>
<p><strong>&#8220;I am not trying to make mud better. I am trying to make computer</strong><strong>s better with mud.&#8221;</strong></p>
<p>He elaborates on the value of Mud Tub in this regard on his site, <a href="http://dirtycomputing.com/" target="_blank">dirtycomputing</a>:</p>
<p><strong>&#8220;The Mud Tub occupies a space similar to other experimental human-computer interfaces, like, multi-touch surfaces, body controllers, augmented reality systems, etc, which push the boundaries of codified interaction models, and drive the development of innovative software applications. Beyond its role as a research topic, the Mud Tub also exists as an open-sourced hardware/software platform on which interactive artists and designers explore new meth</strong><strong>ods for creating and displaying their work.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/mudpongpost.jpg"><br />
</a></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/05/17/creating-the-information-landscapes-of-the-future-locative-media-and-the-shape-of-alpha/feed/</wfw:commentRss>
		<slash:comments>1</slash:comments>
		</item>
		<item>
		<title>Composing Reality and Bringing Games into Life: Talking with Ori Inbar about Mobile Augmented Reality</title>
		<link>https://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/</link>
		<comments>https://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/#comments</comments>
		<pubDate>Wed, 06 May 2009 14:50:30 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[Kids With Cameras]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[MMOGs]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[Add new tag]]></category>
		<category><![CDATA[alternate reality games]]></category>
		<category><![CDATA[alternative reality gaming]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR goggles]]></category>
		<category><![CDATA[ARToolkit]]></category>
		<category><![CDATA[augmented reality games]]></category>
		<category><![CDATA[augmented times]]></category>
		<category><![CDATA[Better Place]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Caryatids]]></category>
		<category><![CDATA[Come Out and Play]]></category>
		<category><![CDATA[composing reality]]></category>
		<category><![CDATA[Cory Doctorow]]></category>
		<category><![CDATA[eyewear for augmented reality]]></category>
		<category><![CDATA[game development conference]]></category>
		<category><![CDATA[Games Alfresco]]></category>
		<category><![CDATA[games for preschoolers on the iphone]]></category>
		<category><![CDATA[games on the iphone]]></category>
		<category><![CDATA[GDC 2009]]></category>
		<category><![CDATA[GE augmented reality ad]]></category>
		<category><![CDATA[google earth]]></category>
		<category><![CDATA[green technology]]></category>
		<category><![CDATA[image recognition]]></category>
		<category><![CDATA[Immersive augmented reality]]></category>
		<category><![CDATA[Int 13]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[iphone games]]></category>
		<category><![CDATA[iPhone OS 3]]></category>
		<category><![CDATA[iphone versus the android]]></category>
		<category><![CDATA[ISMAR]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[jane mcgonigal]]></category>
		<category><![CDATA[julian Bleeker]]></category>
		<category><![CDATA[Kati London]]></category>
		<category><![CDATA[Kweekies]]></category>
		<category><![CDATA[Loopt]]></category>
		<category><![CDATA[markerless AR]]></category>
		<category><![CDATA[markerless augmented reality]]></category>
		<category><![CDATA[Microsoft Tag]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile gaming]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Netweaver]]></category>
		<category><![CDATA[open source augmented reality]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pookatak]]></category>
		<category><![CDATA[Pookatak Games]]></category>
		<category><![CDATA[reality experiences]]></category>
		<category><![CDATA[RFID]]></category>
		<category><![CDATA[Robert Rice]]></category>
		<category><![CDATA[Rouli Nir]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Shai Agassi]]></category>
		<category><![CDATA[smart environments]]></category>
		<category><![CDATA[smart objects]]></category>
		<category><![CDATA[The End of Hardware]]></category>
		<category><![CDATA[the Pong for augmented reality]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[Tish Shute]]></category>
		<category><![CDATA[Tonchidot]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[ubiquitous augmented reality]]></category>
		<category><![CDATA[ubiquitous experience]]></category>
		<category><![CDATA[virtual reality]]></category>
		<category><![CDATA[WARM 09]]></category>
		<category><![CDATA[Wattzon]]></category>
		<category><![CDATA[Where 2.0]]></category>
		<category><![CDATA[WikiMouse]]></category>
		<category><![CDATA[Wikitude]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3448</guid>
		<description><![CDATA[Recently, I talked to Ori Inbar (above), formerly senior vice- president at SAP.Â  Ori is on a mission to make augmented reality commercially successful not in 5, 10, or 15 years, but now. Ori is the founder of Pookatak Games &#8211; a video game company, &#8220;with a vision to upgrade the way people experience the [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/oriinbarpost.jpg"><img class="alignnone size-medium wp-image-3449" title="oriinbarpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/oriinbarpost-300x199.jpg" alt="oriinbarpost" width="300" height="199" /></a></p>
<p>Recently, I talked to <a href="http://gamesalfresco.com/">Ori Inbar</a> (above), formerly senior vice- president at <a href="http://www.sap.com/">SAP</a>.Â  Ori is on a mission to make augmented reality commercially successful not in 5, 10, or 15 years, but now. Ori is the founder of <a href="http://gamesalfresco.com/about/" target="_blank">Pookatak Games</a> &#8211; a video game company, <strong>&#8220;with a vision to upgrade the way people experience the world.&#8221;</strong> Ori will be participating May 20th, in<a href="http://en.oreilly.com/where2009/public/schedule/detail/7197" target="_blank"> O&#8217;Reilly&#8217;s Where 2.0 panel, &#8220;Mobile Reality</a>&#8221; -Â  an event not to be missed IMO.</p>
<p>The taste for computing anywhere anytime has entered human culture via the iphone and is spreading like chocolate cake and pizza at a preschool party (see <a href="http://gamesalfresco.com/2009/03/23/gdc-2009-why-the-iphone-just-changed-everything/" target="_self">why the iPhone changed everything</a>).Â  And while the full flowering of the next step is yet to come &#8211; computing anywhere, anytime by anyone and <strong>anything </strong><a href="http://en.wikipedia.org/wiki/Internet_of_Things" target="_blank">(&#8220;the internet of things&#8221;</a>), our love for these first devices capable of being <strong>mediating artifacts for ubiquitous computing</strong> (Adam Greenfield) is a vital first step to free us from our tethers to computer screens, and fulfill the promise of augmented reality.</p>
<p>If you need more convincing on the pivotal role augmented reality will play as the web moves into the world, check out Tim O&#8217;Reilly&#8217;s recent comments in <a id="iz1_" title="this video clip on Augmented Times" href="http://artimes.rouli.net/2009/04/tim-oreilly-on-recognition-rfid-and-web.html" target="_blank">this video clip posted on Augmented Times</a> and <a id="wtf4" title="here" href="http://radar.oreilly.com/2008/02/augmented-reality-a-practical.html" target="_blank">here</a> early last year.</p>
<p>From another perspective, the gloomy specter of economic and environmental catastropheÂ  is driving a movement to &#8220;<a id="h5pf" title="infuse intelligence into the way the world work's&quot;" href="http://news.bbc.co.uk/2/hi/technology/7992480.stm" target="_blank">infuse intelligence into the way the world work&#8217;s.&#8221;</a> But the challenge for a smart planet is not just about making environments smart, it is about using smart environments to enable people to act smarter (<a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">see my interview with Adam Greenfield</a>).</p>
<p>We need a rapid upgrade in both the way the world works, and the way we experience the world.</p>
<p>((Note:Â  It is time to read (if you haven&#8217;t already) <a href="http://search.barnesandnoble.com/The-Caryatids/Bruce-Sterling/e/9780345460622" target="_blank">Bruce Sterling&#8217;s Caryatids</a> (<a href="book of the year for 2009" target="_blank">Cory Doctorow&#8217;s book of the year for 2009</a>) &#8220;as a software design manual&#8221; (<a href="http://www.nearfuturelaboratory.com/2009/03/17/design-fiction-a-short-essay-on-design-science-fact-and-fiction/" target="_blank">see Julian Bleeker</a>) because Caryatids reveals the Gordian knots of human folly, greed, compassion and desire entwined in near future designs for technologies to save the world.))</p>
<p>Ori Inbar, worked with Shai Agassi (Shai is now leading the world changing <a id="v5ow" title="Better Place" href="http://www.betterplace.com/" target="_blank">Better Place</a> ) driving <a id="gf_5" title="Netweaver" href="http://en.wikipedia.org/wiki/NetWeaver" target="_blank">Netweaver</a> from a mere concept to a &#8220;major, major business for SAP.&#8221; So Ori has already been through the cycle of working in a very small startup and growing it into a billion dollar business.Â  He has both the experience and the passion to realize his vision for augmented reality.</p>
<p>At Pookatak, he explains :</p>
<p><strong>&#8220;We design &#8220;reality experiences&#8221; that make users&#8217; immediate environments more significant to them. We wish to free young and old from getting lost in front of the screen. By delivering the world&#8217;s information to people&#8217;s field of view, and by weaving real world objects into interactive narratives, we help people rediscover the real world.&#8221;</strong></p>
<p>Pookatak will release their first game this summer. Currently it is under wraps. But Ori gives us some glimpses of what is to come in the interview below.</p>
<p>In addition to founding Pookatak, Ori is involved in a broader effort to move augmented reality forward. On his blog, <a id="ie5s" title="Games Alfresco" href="http://gamesalfresco.com/" target="_blank">Games Alfresco</a> &#8211; he recently welcomed <a href="http://gamesalfresco.com/about/" target="_blank">a new partner, Rouli Nir</a>, Ori has focused his eye of wisdom on every significant recent advance in Augmented Reality (check out <a id="zr9y" title="this essence of Ori's thinking in a fast paced video" href="http://gamesalfresco.com/2009/03/09/augmented-reality-today-ori-inbar-speaks-at-warm-2009/" target="_blank">this essence of Ori&#8217;s thinking in a fast paced video</a> presentation for <a href="http://gamesalfresco.com/2009/02/12/live-from-warm-09-the-worlds-best-winter-augmented-reality-event/" target="_blank">WARM &#8216;09</a>).</p>
<p>Also Ori is one of the organizers of the interactive media track at <a id="b-c6" title="ISMAR 2009" href="http://www.ismar09.org/" target="_blank">ISMAR 2009</a>.Â  At ISMAR this year, Ori explained,<strong> &#8220;we are trying to bring in people that develop interactive experiences for consumers, beyond the traditional attendees coming from a research perspective.</strong>&#8221;</p>
<p>In the interview below, Ori explains much of his thinking on how augmented reality will become commercially successful.Â  Enjoy it, think about it, and share it. And most importantly, if you can, get involved with ISMAR 2009.</p>
<p>OriÂ  has inspired me to participate in <a id="seky" title="ISMAR" href="http://www.ismar09.org/" target="_blank">ISMAR</a> this year.Â  Ori pointed out:</p>
<p><strong>The </strong> <a href="http://campwww.informatik.tu-muenchen.de/ismar09/lib/exe/fetch.php?id=ismar09%253Astart&amp;cache=cache&amp;media=ismar09:ismar09-cfp_090211_final.pdf" target="_blank">call for papers</a> <strong>is on, and this year it targets well beyond the typical research papers audience and into interactive media and art folks. </strong></p>
<p><strong>There are plenty of opportunities such as:</strong></p>
<p><strong>Art Gallery</strong></p>
<p><strong>Demonstrations</strong></p>
<p><strong>Tutorial</strong></p>
<p><strong>Workshops</strong></p>
<p>It&#8217;s a huge opportunity to shape the emergence of augmented reality.<br />
<br /></p>
<h2><strong> Interview With Ori Inbar</strong></h2>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-41.png"><img class="alignnone size-full wp-image-3479" title="picture-41" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-41.png" alt="picture-41" width="107" height="146" /></a></p>
<h3>Making Augmented Reality Commercially Successful</h3>
<p><strong>Tish Shute: </strong>You are considered a key trail blazer in AR and you have the go to blog for augmented reality!Â  What are the most important lessons you have learned researching, writing, and developing AR in the last couple of years?</p>
<p><strong>Ori Inbar: You need to have a vision. You need to know where this is going to go in ten or fifteen or twenty years. But you&#8217;ve got to start with something really simple that makes use of the technology you have on hand. And do something that is practical, that people will like, and something they would actually want to buy. It&#8217;s as simple as that. I&#8217;m currently looking at what we could do with existing technology. First of all, you have to put it in front of people. Right now most people have never heard about the term augmented reality. Go into the street, and ask 100 people about it, maybe 2 would know about it. So you need to put it in front of people because most people think it&#8217;s still science fiction or a special effect you see in movies, not something you can experience in real life. </strong></p>
<p><strong>Tish: </strong>It seems to me to that for augmented reality applications to become popular with existing technology the key breakthrough would be getting people to hold up their phones. What are the obstacles to getting people to use their mobile devices like this?</p>
<p><strong>Ori: There&#8217;s a really nice cartoon by </strong><em> </em><strong><a href="http://www.tonchidot.com/">Tonchidot</a> (below) &#8211; the Japanese company behind the Sekai Camera. It&#8217;s an illustration showing the evolution of man, from ape to man (holding a cell phone looking down), to the developed man holding a device like a camera &#8211; in front of its eyes.</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-37.png"><img class="alignnone size-medium wp-image-3454" title="picture-37" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-37-300x221.png" alt="picture-37" width="300" height="221" /></a><strong></strong></p>
<p><strong>Which is exactly what you&#8217;re talking about. People ask, &#8220;are people going to walk with this like that all day long?&#8221; Probably not. I mean you have to build it in a way that doesn&#8217;t require them to hold it like that all the time. People are used to this gesture with the ubiquitous digital cameras. I tested one of my prototypes on a two and a half year old girl. She had no problem holding it just like she holds a camera.<br />
</strong><br />
<strong>Tish:</strong> <a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank"> Blair MacIntyre</a> mentioned, &#8220;The problem with the mobile phone as a AR device is a problem of awareness,&#8221; i.e., you have to have a way of letting people know when there&#8217;s something interesting wherever they are. One of the issues regarding this is if you get too many alerts, then you tune them out.</p>
<p><strong>Ori: First of all Blair is one of the people in academia that get it. Because he looks at it from an experience perspective. Not just as an interesting technical problem to solve. Let&#8217;s start with getting people to enjoy this new experience. The AR demos so far were mostly eye candies, and mostly for advertising &#8211; the<a href="http://ge.ecomagination.com/smartgrid/#/landing_page" target="_blank"> GE AR ad</a> created a lot of buzz; but you look at it for 10 seconds and you forget about it.Â  You need to build something that people would want to experience over time and would be willing to pay for. I think that&#8217;s the big test, right?</strong></p>
<p><strong>Now in terms of having a ubiquitous experience where you&#8217;re continuously connected, it doesn&#8217;t have to be an overwhelming experience. Just like some of the social media tools we&#8217;re using today, we decide when to connect, and we filter out the trash. You could get alerts only for things that really matter to you, not for everything that happens in your immediate environment. </strong></p>
<p><strong>There will be many layers of information, and it&#8217;ll be up to you to pick the ones you want to experience. The real benefit is that you get the information in your own field of view and in context of where you are or what you do.</strong></p>
<p><strong>Tish:</strong> So what are you working on these days?</p>
<p><strong>Ori: We are working on a little app that targets a very different audience than what you&#8217;d expect: pre schoolers. We think we can encourage them to get away from a PC or TV screen and learn something while playing &#8211; in the real world. You&#8217;ll hear more about it as soon as this summer. Nuff said.</strong></p>
<p><strong>But, it is a small application that will run on the iPhone. People ask how many pre-schoolers own iPhones? Well, their parents do. </strong></p>
<p><strong>Tish:</strong> Yes there are certainly many New York kids with iPhones &#8211; my kid now has my old iphone.Â  He has pretty much switched from playing games on his DS to the iPhone. I noticed in your WARM video you place a big emphasis on AR as something that will get kids away from screens and engaged with reality.Â  This is something parents will approve of!</p>
<p><strong>Ori: Yes I saw something really interesting at my kids&#8217; party one day; they were all sitting around the room &#8211; looking down at their own DS screens.Â  You could play the DS anywhere, but kids would usually play it on the sofa, looking at the screen, isolated from the world. With an iPhone and a camera, and the application we&#8217;re producing, reality becomes part of the game. Yes that makes it all of a sudden much more interesting for parents. Because kids are spending so much time in front of the screen, all of a sudden they&#8217;re something that will encourage them to interact with real objects, real things. Every parent I&#8217;ve talked to loves that idea.</strong></p>
<p><strong>Tish:</strong> Yes that is what is cool about the work of <a href="http://www.katilondon.com/" target="_blank">Kati London</a> &#8211; I think I saw someone say this on Twitter, &#8220;Kati puts the computer in the game not the game in the computer.&#8221;</p>
<p><strong>Ori: Yes, kids are spending more time in front of games and the computer because it&#8217;s more interesting. It captivates them with &#8220;<a id="x_z0" title="game pleasures" href="http://8kindsoffun.com/">game pleasures</a> &#8221; that tap into their brain&#8217;s dopamine circuitry &#8211; constantly seeking reward and satisfaction. So you&#8217;re not going to be able to tell them to go back to playing in reality without these pleasures. We have to study these mechanics from games and bring them into reality. It&#8217;s about programming real life; and augmented reality helps you achieve that.</strong></p>
<p><strong>Here&#8217;s an example: cause and effect; in a game when you do something you always get an immediate effect. You&#8217;re good, you get a reward. You&#8217;re not good, you get a cue to improve. In real life you do things and you could wait 2 or 3 years until you actually get feedback (if you&#8217;re lucky). Augmented Reality allows you to bring these mechanics into the real world. I think that&#8217;s going to help kids rediscover reality, in a new sense, which is what every parent is dreaming about.</strong></p>
<p><strong>Tish:</strong> I don&#8217;t know how much you can say about your app. But in regard to doing augmented reality on the iPhone.. there&#8217;s no compass. Is this a limitation?</p>
<p><strong>Ori: True, no compass yet. But the camera gives you a lot of information that you can interact with. When you run the application, you see the world in front of you, and if the app can recognize real life objects &#8211; it can put virtual elements on top of it.</strong></p>
<p><strong>Tish:</strong> But not with any accuracy unless you&#8217;re using markers. Are you using markers?</p>
<p><strong>Ori: We&#8217;re using natural feature recognition. It doesn&#8217;t have to be an ugly looking marker. It can be any image.</strong></p>
<p><strong>Tish:</strong> So you&#8217;re using image recognition. Are you working with one of these image recognition startup companies (<a id="nws6" title="list here" href="http://www.educatingsilicon.com/2008/11/25/a-round-up-of-mobile-visual-search-companies/" target="_blank">list here</a> )?</p>
<p><strong>Ori: We&#8217;re working with one of those. What&#8217;s unique about it is it runs very nicely on any cell phone, and on the iPhone it works the best. For this first app, it doesn&#8217;t really matter where you are physically; the geolocation is not part of the experience. </strong><span style="background-color: #ffff00;"><br />
<strong><br style="background-color: #ffffff;" /></strong><span style="background-color: #ffffff;"><strong>Tish: </strong> For a truly engaging AR experience we will need more of a backend than is currently available?</span><br />
</span><br />
<strong>Ori: I call the backend the cloud, where you have all this information and ways to access it from anywhere. Actually I think it&#8217;s become pretty mature today. If you look at the different elements required to enable an augmented reality experience to work, you have &#8211; first &#8211; the user whose always in the center. Then you have the lens. The lens can be an iPhone, or glasses, even a projector. The lens allows you to watch, sense and track information in the real world: people, places, things. Then in the backend you have the cloud where you store and retrieve information.</strong></p>
<p><strong>So if you look at the maturity of these different elements, I think the cloud is in pretty good shape. Because there&#8217;s so much information we&#8217;re collecting and storing. Anything from Google, Wikipedia, Facebook, all that kind of stuff, it&#8217;s a lot of useful information you can access from anywhere using APIs. And a lot of it is also starting to include geolocation information. Take <a id="zhag" title="Loopt" href="http://www.loopt.com/" target="_blank">Loopt</a> or Google&#8217;s <a href="http://www.google.com/latitude/intro.html" target="_blank">friends service</a> that allows you to see where your friends are and what they&#8217;re doing. There&#8217;s tons of information out there and it&#8217;s pretty easy to access it. Now what do you do with it is the question?</strong></p>
<p><strong><a href="http://www.mobilizy.com/wikitude.php" target="_blank">Wikitude</a> is such a simple and brilliant application and nobody thought about doing it until this guy from Salzburg did. It doesn&#8217;t have any sophisticated visual tracking. It knows your position and it&#8217;s simply looking at the angle you&#8217;re pointing to. Based on these parameters it brings information from Wikipedia that pertains to your field of view. So most of it was already there. It&#8217;s just a matter of connecting the pieces in an experience that is valuable for people.</strong></p>
<p><strong>Tish: </strong>It is the uptake of even a very simple technology that puts the magic in it.</p>
<p><strong>Ori:Â  Yes, take Twitter. If you go to its homepage it looks like a very simple boring app but it is something that is both enjoyable and very useful to people.</strong></p>
<h3><strong>Why you should participate in ISMAR 2009</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-40.png"><img class="alignnone size-medium wp-image-3478" title="picture-40" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-40-222x300.png" alt="picture-40" width="222" height="300" /></a><br />
<strong>Tish: </strong>I know that you are involved in organizingÂ  <a id="seky" title="ISMAR" href="http://www.ismar09.org/" target="_blank">ISMAR</a> (picture above from Ori&#8217;s post on <a href="http://gamesalfresco.com/2009/02/23/ismar-2009-the-worlds-best-augmented-reality-event-wants-you-to-contribute/" target="_blank">&#8220;ISMAR 2009: The World&#8217;s Best Augmented Reality Event&#8230;,</a>&#8220;) and there is a call out for papers and for volunteers, can you tell me more about it?</p>
<p><strong>Ori: Yes, we hope to have the first ISMAR where we practice what we have just discussed: let&#8217;s build on all the research invested so far and instead of thinking only about 5-10 years from now, let&#8217;s see what we can do today. So we are bringing people in from other disciplines &#8211; artists, interactive media developers and people from the entertainment industry.Â  The goal is to use the technology to make something interesting for people &#8211; again, something that people would buy, and making it commercially successful.Â  Many people either don&#8217;t know about ISMAR because in the past it was a pure engineering-orientated event and peopleÂ  from a commercial perspective of AR weren&#8217;t attracted to it.Â  The Chair of the Event this year is based in Florida and he is going to bring in a lot of people from the entertainment industry such as Disney. I think this will transform this event into something more like SIGGRAPH &#8211; more of an industry event.Â  As one of the organizers of the interactive media track we are trying to bring in people that want to build applications for consumers.</strong></p>
<p><strong>Tish:</strong> In terms of AR applications what are the flagships today?</p>
<p><strong>Ori: There are very few because it&#8217;s just the beginning. There&#8217;s one tiny studio in France called <a id="z1ln" title="Int 13" href="http://www.int13.net/en/" target="_blank">Int 13</a>. They&#8217;ve created maybe the first commercial game running on a mobile device using AR technology. It&#8217;s called <a href="http://www.youtube.com/watch?v=Te9gj22M_aU" target="_blank">Kweekies</a>. It was one of the contenders for the Nokia Mobile innovation awards. They were one of the ten finalists, but they didn&#8217;t win it. It looks really cool. It&#8217;s something that runs on your desk, with a marker. Many AR folks say markers are the past, markers are ugly. But it&#8217;s still a cool experience. I think people will go for it.</strong></p>
<p><strong>Tish:</strong> Yes I think we will have to look to small companies that are free to think creatively to lead the way.Â  It seems many games companies are tied up pulling off huge big budget projects and enterprise is still catching up on how to use social media!</p>
<p><strong>Ori: Yes, last year I was in the game development conference (GDC); there was no mention of augmented reality &#8211; not on the exhibition floor, none of the sessions, nobody talked about it. I was stunned. Then this year, there was a little change. There were like three demos on the exhibition floor, <a href="http://www.metaio.com/" target="_blank">Metaio</a>, <a href="http://www.vuzix.com/home/index.html" target="_blank">Vuzix</a> and a Dutch company called <a href="http://www.augmented-reality-games.com/" target="_blank">Beyond Reality</a>.&#160; And then there was Blair&#8217;s talk, which was very very cool. The room was packed with people. And after the talk there were dozens of people lining up to talk with him about the topic. There was definitely interest, but still on the very edge. The video game industry is still a hit driven business and publishers spend upward of 20-30 million dollars to create the best AAA game possible. They just can&#8217;t take the risk. So it&#8217;s going to come from smaller companies, from outsiders coming in with a vision and understanding on how to put the AR pieces together to create a totally new experience.</strong></p>
<p><strong>Tish:</strong> But the basic tool set is there isn&#8217;t it?</p>
<p><strong>Ori: I talked to some folks at the games developer conference, many folks with MMO background, and they have great ideas about AR. It&#8217;s great to see different people with different views on what&#8217;s needed first. &#8220;Joe the Programmer&#8221; had this idea of creating a small piece of hardware that you can put in every house and provide accurate geospatial information in your home. That couldÂ  open up many opportunities for AR experiences in homes.</strong></p>
<p><strong>Tish:</strong> Don&#8217;t you think we have enormous resources in terms of image databases that provide a great basis for augmented reality.Â  I was talking to Aaron Cope at ETech about <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">The Shape of Alpha</a> &#8211; Flickr&#8217;s vernacular mapping project using all the geotagged photos in Flickr. That is such cool project. <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43824" target="_blank">Aaron will be speaking at Where 2.0</a> also.</p>
<p><strong>Ori: Think of Google Earth. Google Earth leveraged communities to basically map all the major cities around the world into 3D models. And that is an essential step to be able to do augmented reality outdoors. Because if you had to model everything from scratch, it wouldn&#8217;t be realistic.</strong></p>
<h3><strong>Augmented Reality and Becoming Greener.</strong></h3>
<p><strong>Tish:</strong> I am really interested in how AR interfaces might be useful to some of the emerging energy identity/metering projects like <a href="http://www.amee.com/" target="_blank">AMEE</a> and <a href="http://www.wattzon.com/" target="_blank">WATTZON</a> because I think it is very important that people have very intuitive, immediate, and enjoyable ways to relate to energy data so they can make greener choices.</p>
<p><strong>Ori: Back in the day I had an idea to build an Augmented Reality application to become greener. You look at things around your home with the camera and itÂ  recognizes its green gas footprint and makes recommendations to reduce it.Â  I guess it was a bit too early to do that based on visual recognition alone&#8230;you&#8217;d needÂ  additional sensors that would provide related information about what you are looking at.</strong></p>
<p><strong>Tish:</strong> Well as there is more interest in Green technology do you think we may see VC interest in some green AR projects now?</p>
<p><strong>Ori: I talked to some of the investment folks, Angels as well as VC&#8217;s about AR and they had no clue what it is. There&#8217;s a need for a whole lot of education. And there are no proof points (as in successful investments in this domain), and counter to popular belief &#8211; they don&#8217;t like risk so much&#8230;</strong></p>
<p><strong>Tish:</strong> And consumer adoption must lead the way, right?</p>
<p><strong>Ori: Just like with every emerging technology in history, people never bought the technology, they bought the content, the apps, the benefits that came on top of the technology. Whether it was VHS winning over Beta Max, or BluRay winning over HD. It&#8217;s always because of more/better content. Look at the video game console war: Xbox, and Nintendo did better than Sony just because they had more and better games. Even Windows was a success thanks to its applications. People bought it for the applications not the OS. The content is the first to drive demand.</strong></p>
<p><strong>Tish:</strong> One of the challenges to giving people new ways to relate to their energy consumption is that you can just have them looking at graphs of how bad they have been in the past &#8211; that may make them feel bad but that doesn&#8217;t necessarily give them ways or motivation to change. There perhaps needs to be a more immediate relationship to the data to facilitate change. I think the mantra for optimization of anything from energy usage to supply chains is timely, actionable data?</p>
<p><strong>Ori: There are a lot of ideas about measuring information and displaying it to people. For example, the Prius hybrid car, one of its interesting features &#8211; which is kind of game like &#8211; is a constant display of your current fuel consumption. That alone changes how people drive because they try to beat the &#8220;Score&#8221; and as a result conserve more fuel. That model can be applied to our homes&#8230;</strong></p>
<p>Tish: Yes that is something I am very interested in. I have been following several projects in this area &#8211; one of my favorites is the <a href="http://www.arduino.cc/" target="_blank">Arduino</a>, <a href="http://www.currentcost.com/" target="_blank">Current Cost</a>/<a href="http://www.ladyada.net/make/tweetawatt/" target="_blank">Tweetawatt</a>, <a href="http://www.pachube.com/" target="_blank">Pachube</a> integrations <a href="http://www.ugotrade.com/2009/04/24/homecamp-2-home-energy-management-and-distributed-sustainability/" target="_blank">I saw at Homecamp</a>.</p>
<p>You joined a start up with Shai Agassi which was bought out by SAP right? He has a brilliant approach with Better Place.</p>
<p><strong>Ori:Â  I think what&#8217;s really unique about Better Place&#8217;s approach is that he doesn&#8217;t require people to change their behavior. People are still going to have their own cars. They&#8217;ll be able to drive as far as they want, and for the same (or lower cost). Its not necessarily about a new technology, electric cars have been around for a long time but there was no way people were going to be limited by the 50 or 70 mile range and Better Place is solving that problem. With its infrastructure of charging spots and battery switching stations, drivers are going to be able to drive anywhere. And it&#8217;ll be similar to having to stop once in a while to refuel your car. The price maybe even lower than what you pay today for your transportation needs &#8211; and you&#8217;ll stop generating green gas. It&#8217;s a clever way of taking technology to a whole new level without changing the behavior of people.</strong></p>
<p><strong>Tish: </strong>Better Place is a classic example of things as a service isn&#8217;t it?Â  It is basically a utility company.</p>
<p><strong>Ori: It is similar to a phone carrier model.Â  You pay for a membership that gives you access to the car (equivalent to the phone) and electricity (equivalent to the phone line) for the same price of fuel cost today. And as bonus you get to save the world.</strong></p>
<h3><strong>How the iphone changed the game for AR &#8211; and the iphone versus Android</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-38.png"><img class="alignnone size-medium wp-image-3472" title="picture-38" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-38-300x198.png" alt="picture-38" width="300" height="198" /></a><em></em></p>
<p><em>Picture from Ori&#8217;s post</em><strong><em>, <a href="http://gamesalfresco.com/2009/03/23/gdc-2009-why-the-iphone-just-changed-everything/" target="_blank">&#8220;GDC 2009: Why the iphone changed everything&#8221; </a></em></strong></p>
<p><strong>Ori: And back to AR, you have to take the same approach, because nobody wants to don those huge head mounted displays or backpacks. You have to take advantage of people&#8217;s current behavior: they already carry their iPhones or similar devices.</strong></p>
<p><strong>Tish:</strong> As we discussed, you just have to get people raising up their phones and looking through them when that is a useful thing to do. Both Wikitude and Nathan Freitas&#8217;s graffiti app were enough to get me interested in the evolutionary step of raising my phone! Nathan&#8217;s graffiti app is nice. You leave a marker for your graffiti so other people can find and view/add their own &#8211; a nice primal experience like pissing on the lamp post to let your pack know where you&#8217;ve been.&#160; Also the graffiti app taps into a long history of&#160; NYC street culture around tagging and graffiti art (see my interview, <a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">&#8220;Is it OMG finally for Augmented Reality?&#8221;</a>).</p>
<p><strong>Ori: The app store has fundamentally changed the mobile gaming industry. Last year they were in shambles. There was no growth. Everybody was complaining, &#8220;we can&#8217;t handle it, there&#8217;s a million phones, and you have to test it on each phone. And carriers suck, they don&#8217;t care about sharing and promoting your content.&#8221; Everything was bad. This year mobile gaming is the hottest thing. And it&#8217;s all because of the iPhone. It changed the game.</strong></p>
<p><strong>Tish: </strong>How do you think Android is going to get traction against the iphone?</p>
<p><strong>Ori: Well the number one thing is the form factor &#8211; the iPhone is just much cooler than the G1. It&#8217;s OK but it doesn&#8217;t have the same feel. People thought it was going to be easy to clone the iPhone but none of the attempts succeeded so far.</strong></p>
<p><strong>Tish: </strong>How much does it matter for AR not being able to runs things persistently in the background on the iphone?</p>
<p><strong>Ori: Actually they have added such a capability in OS 3.&#160; You can now make use of a background service.</strong></p>
<p><strong>Tish:</strong> OS 3 will open up new possibilities for AR?<strong> </strong></p>
<p><strong>Ori: The access to the video API is still not public.Â  But there is a new Microsoft application &#8211; Microsoft Tag that makes use of that API which means it is probably OK to use it.</strong></p>
<p><strong>Tish: </strong>(I ask Ori for his card and he shows me how to read it with my iphone.) Oh nice you have an AR card, of course!</p>
<h3><strong>In Search of Pong for Augmented Reality</strong></h3>
<p><strong>Tish: </strong>So how will AR begin to, as Blair&#8217;s friend put&#8217;s it, &#8220;facilitate a killer existence,&#8221; particularly as we are probably looking at some new and perhaps pricey hardware?</p>
<p><strong>Ori: You could take the Better Place approach. We&#8217;re going to give you a great experience and we&#8217;ll include the devices as part of that experience for the same price. Let&#8217;s say you subscribe to an AR experienceÂ  which offers access to multiuser, support, and all the information you need wherever you go &#8211; exactly according to the vision. You pay for a subscription on a monthly basis and included in that cost we give you a better device that offers aÂ  better AR experience. It&#8217;s following the phone carrier approach, but in a good way.</strong></p>
<p><strong>But first of all we do need our Pong! I was sitting with a couple of AR game enthusiasts at the GDC and we were asking ourselves, &#8220;how do we create the first pong for AR?&#8221;</strong></p>
<p><strong>Was Pong a multiplayer game? Not necessarily! Did it connect to the network? No! We have to create the first dot in a long line of dots that will bring us to our destination.</strong></p>
<p><strong>Tish: </strong>You haven&#8217;t seen a Pong yet have you?</p>
<p><strong>Ori: Not yet. I mean there&#8217;s maybe a handful of games and apps out there, but I don&#8217;t think any of them is a Pong yet. Still, it&#8217;s getting closer.</strong></p>
<p><strong>Tish: </strong>Kati London is doing some very interesting work on bringing games into reality, isn&#8217;t she?</p>
<p><strong>Ori: Yes, she works with Frank Lanz at <a href="http://playareacode.com/" target="_blank">Area/Code</a>. He teaches at NYU and has designed games for the <a href="http://www.comeoutandplay.org/" target="_blank">&#8220;Come Out and Play&#8221;</a> festival here in Manhattan. And a lot of these games are actually low tech.</strong></p>
<p><strong>Tish:</strong> Yes I have a big alternate reality game blog brewing that I haven&#8217;t had time to write yet!</p>
<p><strong>Ori: The city is the gameboard is their slogan. It&#8217;s going to be a great playground for AR games. The city becomes a theme park. The city could become an even bigger touristic attraction. People will come to the city to be part of these games. So you&#8217;re having thousands of people running around the city playing all sorts of games from laser-tag style to history adventures, to treasure hunts.</strong></p>
<h3><strong>Composing Reality</strong></h3>
<p><strong>Tish: </strong>So why haven&#8217;t you focused on one of these kinds of games with your company?</p>
<p><strong>Ori: We have a couple of scenarios along these lines that we&#8217;re planning for 2010-11. But first focus on what&#8217;s possible today.</strong></p>
<p><strong>Tish: </strong>And what&#8217;s stopping you from doing those kind of games today?</p>
<p><strong>Ori: Many things. The devices are not there yet, location services are not accurate enough, ubiquitous sensors are notÂ  there yet.</strong></p>
<p><strong>Tish: </strong>You think alternate reality gaming needs more &#8220;ubiquity&#8221; than is currently available?</p>
<p><strong>Ori: Not necessarily. People are doing alternate reality games with no &#8220;ubiquity&#8221; at all. But my interest is to add the visual aspect. I believe humans are mostly driven visually.</strong></p>
<p><strong>Jane McGonigal said in a talk at GDC, that AR would allow us to program reality, which is exactly how I look at it. Once you can recognize things, some of it with WiFi and RFID and all sorts of sensors. But visual sensors is always going to be the ultimate way to recognize things. And once you recognize things and know what they are, and can pull information about those things (or people and places) from the internet, you can program it (visually). You could program it to be fictional, like in a video game, or it could be programmed as non-fictional, like a documentary. And that allows you to do things that before were unimaginable.</strong></p>
<p><strong>Tish: </strong>But you can&#8217;t forget the visual, it is primary the connection to peoples&#8217; primary sensory relationships.</p>
<p><strong>Ori: Yes, it&#8217;s like you go to a grocery store and you pick your vegetables, a lot of it is by sight and by touch. And what if you could also see just by looking at it that it&#8217;s from a local store, and that it&#8217;s organic?</strong></p>
<p><strong>Tish:</strong> It goes beyond overlays really?</p>
<p><strong>Ori: By the way, I don&#8217;t like the term &#8216;overlay&#8217;. I know that&#8217;s how it looks: you either overlay or superimpose, but I&#8217;m still searching for a better term. A term I prefer to use is &#8220;composing reality&#8221;. Just like painters, they use brushstrokes and colors and compose a painting. We need to take the real element and the virtual element and compose them into something new. It&#8217;s not just about slapping one on top of the other.</strong></p>
<p><strong>Tish: </strong>Yes, I think the idea of dashboards is not so appealing.</p>
<h3><strong>Pookatak Games</strong></h3>
<p><strong>Tish: </strong>Do you want to explain the evolution of your company? You have an interesting history of success with high end enterprise applications.</p>
<p><strong>Ori: Since I was a kid I wanted to invent and create things. When I discovered software, that was a really cool way of actually creating things from nothing. From thin air; and you can do it very quickly. That&#8217;s what brought me into software. But I was always looking for the intersection between technology and art. Looking for ways to bring these things together. In the early nineties virtual reality was doing it. It had the appeal of cutting edge technology that can be combined with art. But then, as we all know, it crashed. So I joined Shai Agassi&#8217;s startup (who is now doing Better Place) back in the early nineties. I was one of the first employees in his startup which was developing multimedia products. I was leading the development of one of its flagship products. At some point we realized the technology could be great for an enterprise environment.</strong></p>
<p><strong>It was a really great experience. First going through this cycle from a very small startup and growing into this multi billion dollar business. I was responsible for defining and marketing SAP&#8217;s platform, which was called Netweaver. It was just an idea when we joined SAP and by the time I left it was a major, major business for SAP. I learned about the challenges of building a platform. No matter what purpose you&#8217;re building it for, it typically has similar rules. It&#8217;s definitely not just about the technology; the content that comes with it is really key to making a platform successful.</strong></p>
<p><strong>The third part of this platform trifecta is the community. If you don&#8217;t build a community, you won&#8217;t get the critical mass required for adoption. It may be your own platform but it&#8217;s not necessarily the people&#8217;s platform. That experience is very key to what we&#8217;re doing today. Now, a new industry is being born on the basis of a remarkable technology. But to drive adoption, first we&#8217;ll need good content. The content will be created using today&#8217;s technology with internal tools developed to simplify the process. Next step would be to make the tools used internally &#8211; available to other developers. Help scale the industry, enable innovation on a larger scale. That way we have a chance to create a platform. So it isn&#8217;t really just about my company. I&#8217;m so passionate about augmented reality, I want it to become a healthy and successful industry for the next 5, 10, 15 years.</strong></p>
<p><strong>Tish: </strong>Yes, I am so ready to be liberated from sitting behind a computer screen! And I know that all this hardware is murdering the environment.</p>
<p><strong>Ori: There&#8217;s &#8216;s the book by Rolf Hainich which is called &#8220;<a id="ba8p" title="The End Of Hardware" href="http://www.theendofhardware.com/">The End Of Hardware.</a> &#8221; It&#8217;s about hardware for augmented-reality. Once you use goggles or other AR interfaces you eliminate the need for screens, laptops, etc. It&#8217;s going to be great for the environment. You have read Rainbow&#8217;s End, right? According to the book in few years there will barely be any (visible) hardware. At least it&#8217;ll have a much smaller footprint for the environment. And it&#8217;ll touch every aspect of life, everything you do. It&#8217;ll change the way you interact with the world.</strong></p>
<h3><strong>The Elusive Eyewear for Immersive AR.</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/retroar-googlespost.jpg"><img class="alignnone size-medium wp-image-3469" title="retroar-googlespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/retroar-googlespost-300x225.jpg" alt="retroar-googlespost" width="300" height="225" /></a><br />
<em>Friend of Ori&#8217;s in San Francisco wearing retro AR goggles (from <a href="http://gamesalfresco.com/2009/05/04/gdc-2009-roundup-a-tiny-spark-of-augmented-reality/" target="_blank">Games Alfresco, Ori&#8217;s roundup of GDC 2009</a>)</em></p>
<p><strong>Tish:</strong> OK, let&#8217;s talk about goggles.</p>
<p><strong>Ori: Goggles are going to happen, we want to be hands free.</strong></p>
<p><strong>It&#8217;s going to happen because it&#8217;s just a more intuitive way to use this technology. But above all it has to look cool. Because if it&#8217;s not, if it&#8217;s a big headset, then maybe a small percent of the population might use it, but most people won&#8217;t. It has to look like an accessory, like new cool eyeglasses that you just must wear.</strong></p>
<p><strong>I recently talked to a friend, who runs an industrial design firm, and has experience in designing such glasses for companies like Microvision and Lumux. He says that when you try to bring the images so close to our eyes &#8211; there are some really hard problems to solve. Otherwise it can become really annoying and cause dizziness.</strong></p>
<p><strong>But I&#8217;m optimistic. I believe it&#8217;s going to happen 3 to 5 years from now. It&#8217;s already starting now: Vuzix announced goggles that will be available this year. Some AR apps are going to take advantage of it next year. Initially only a fraction of the population will use it. And that&#8217;s going to help advance it and make it better and better. But it&#8217;s going to take time until it reaches the mass market.</strong></p>
<p><strong>Tish:</strong> In virtual worlds we have seen, I think, a lot of mistakes in terms of reinventing the wheel and producing too many proprietary versions of the same thing and not enough concerted effort on standards and open platforms that could create a vibrant ecosystem. How can augmented reality not make the same mistakes?</p>
<p><strong>Ori: There are some early AR open source efforts, ARToolKit and ARTag, but it is not a movement yet. One of the things we&#8217;re trying to do at ISMAR this year is to put together discussions around key industry issues, such as standards. Some people say it&#8217;s too early, you have to have a de facto standard to start from. But pretty soon it&#8217;s going to be too late. Just like with virtual worlds, all of a sudden you have all these islands that don&#8217;t talk to each other. Why get to that point if we can plan to avoid it? Let&#8217;s start thinking about it right now. On the other front there are devices. There are pockets of people working on adapting devices for AR, second guessing the hardware companies. Why not get them together with the Intels and Nvidias of the world, and discuss what this device should be able to do. And then compete to make it happen.</strong></p>
<p><strong>Tish: </strong>How much luck are you having with this discussion part?</p>
<p><strong>Ori: People are very interested in doing this. We proposed these panels for ISMAR. And I&#8217;ve got some key people already on board. They have tons of input, they want to get involved. We&#8217;ll see how much we can actually get out of it.</strong></p>
<p><strong>Tish: </strong>In virtual worlds it was a while before vibrant open source communities developed. OpenSim has, I think, been the breakthrough community in this regard.</p>
<p><strong>Ori: You have to think about the elements up front. The dream job is to architect the industry. Say we agree on the required pieces. Then we could help the right companies succeed in delivering the pieces. Next, we have to collaborate so that these pieces talk to each other. And eventually these communication methods will become de facto standards and most developers will adopt it.</strong></p>
<p><strong>Tish: </strong>So I&#8217;m going to put you in the role. You&#8217;ve got your dream job. You&#8217;re going to architect this community. So what are the key pieces and where would you like to see the open source communities take hold first?</p>
<p><strong>Ori: Open source will not be exclusive. It&#8217;s going to live side by side with proprietary technology.</strong></p>
<p><strong>The key pieces? You have the user at the center. And the user interacts with a lens. The lens includes both the hardware and the software. And then the lens senses and interacts with the world, which includes people, things and places. And these people-things-places emit information &#8211; about who they are, where they are, what they&#8217;re doing, etc. &#8211; which is then stored in the cloud.</strong></p>
<p><strong>And then you have the content providers, the people and companies, composers who weave AR experiences through the pieces we mentioned before. These composers need a platform that glues these pieces together. Pieces of the platform will be on the lens, and in the world, and in the cloud. If you manage to remove the frictions, and connect these pieces into an experience that people like &#8211; then you have a platform. What the platform does is reduce the overhead and accelerate innovation.</strong></p>
<p><strong>Tish: </strong>Another problem virtual worlds faced in their development was their isolation from the world wide web.Â  Will augmented reality avoid this plight?</p>
<p><strong>Ori: Yes, I believe the key, like you said before, is not to reinvent the wheel. The cloud is already there. Take Wikitude for example, all <a href="http://www.mobilizy.com/" target="_blank">Mobilizy</a> had to do is build a relatively simple client app, connected to Wikipedia, and all of a sudden it offered a wealth of information in your field of view.</strong></p>
<p><strong>I think we can learn a lot from web 2.0. For example, in order to have a ubiquitous experience like <a href="http://www.curiousraven.com/" target="_blank">Robert Rice</a> and others are striving for, you&#8217;ll need to 3d map the world. Google Earth-like apps are going to help but it is not going to be sufficient. So let&#8217;s leverage people. Google became successful in part by making people work with them. Each time you create a link from your blog to my blog their search engines learn from it. So let&#8217;s find ways to make people create information that can be used for AR.</strong></p>
<p><object width="425" height="344" data="http://www.youtube.com/v/GTXtW3W8mzQ&amp;hl=en&amp;fs=1" type="application/x-shockwave-flash"><param name="allowFullScreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://www.youtube.com/v/GTXtW3W8mzQ&amp;hl=en&amp;fs=1" /><param name="allowfullscreen" value="true" /></object></p>
<p><em>Ori Inbar directed <a title="Wiki Mouse" href="http://www.youtube.com/watch?v=GTXtW3W8mzQ" target="_blank">Wiki Mouse</a> &#8211; a WIKI Film co-created by a swarm of movie makers around the world.</em></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/feed/</wfw:commentRss>
		<slash:comments>12</slash:comments>
		</item>
		<item>
		<title>Sensor Networks and Sustainability: &#8220;Connecting Real, Virtual, Mobile and Augmented Spaces&#8221;</title>
		<link>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/</link>
		<comments>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/#comments</comments>
		<pubDate>Sun, 19 Apr 2009 06:32:59 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[3D internet]]></category>
		<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[MQTT and RSMB]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[OpenSim]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[realXtend]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual HomeCamp]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[Carbon Goggles]]></category>
		<category><![CDATA[distributed sustainability]]></category>
		<category><![CDATA[home energy management]]></category>
		<category><![CDATA[open data]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[sensor networks and sustainability]]></category>
		<category><![CDATA[SHASPA]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[TweetaWatt]]></category>
		<category><![CDATA[Virtual Worlds]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3381</guid>
		<description><![CDATA[Today, I did a presentation, on connecting real, virtual, mobile, and augmented spaces to support sustainability, for Earth Week SL, with Dave Pentecost and Jim Purbrick, who presented on Carbon Goggles. Dave and I focused on sensor networks, open data, Pachube, OpenSim, and sustainability from the perspective of &#8220;hack local, think global.&#8221; Dave and I will [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21.png"><img class="alignnone size-medium wp-image-3382" title="picture-21" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21-300x225.png" alt="picture-21" width="300" height="225" /></a></p>
<p>Today, I did a presentation, on <a href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj" target="_blank">connecting real, virtual, mobile, and augmented spaces to support sustainability,</a> for <a href="http://slearthweek.wordpress.com/2009/04/10/earth-week-press-release-see-schedule-also/" target="_blank">Earth Week SL</a>, with <a href="http://www.gomaya.com/glyph/" target="_blank">Dave Pentecost</a> and <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a>, who presented on <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a>.</p>
<p>Dave and I focused on sensor networks, open data, <a href="http://www.pachube.com/" target="_blank">Pachube</a>, <a href="http://opensimulator.org/wiki/Main_Page" target="_blank">OpenSim,</a> and sustainability from the perspective of &#8220;hack local, think global.&#8221; Dave and I will be picking up on some of these themes of sensor networks and sustainability next week in our presentation with <a href="http://www.darleon.com/" target="_blank">Dimitri Darras</a> at ITP, NYU, April 24th, 6.30 pm to 8 pm &#8211; <a href="http://itp.nyu.edu/sigs/news/special-event-open-sim/" target="_blank">details here</a>. If you are in New York City, I hope to see you there.</p>
<p>We got some interesting insights into augmented reality from <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a> whose <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a> project prototypes how we can use augmented reality to read carbon identity and to combine well organized, verified data from <a href="http://www.amee.com/" target="_blank">AMEE</a> &#8211; a neutral aggregation platform to measure the &#8220;carbon footprint&#8221; of everything on earth, with crowd sourced tagging and linking.</p>
<h3>Shaspa &#8211; &#8220;the sensor network system that has it all&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22.png"><img class="alignnone size-medium wp-image-3391" title="picture-22" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22-300x224.png" alt="picture-22" width="300" height="224" /></a></p>
<p>We also discussed, recently launched, <a href="http://www.shaspa.com/" target="_blank">Shaspa</a>. Shaspa&#8217;s energy management packages connect spaces &#8211; real, virtual, mobile and augmented. Shaspa has been blogged by <a href="http://www.maxping.org/business/real-life/virtual-management-of-energy-consumption-in-the-home.aspx/" target="_blank">Maxping</a> and <a href="http://www.virtualworldsnews.com/2009/04/shaspa-launches-home-energy-organizer-on-opensim.html" target="_blank">Virtual World News</a>, so you can read all about it, but the Shaspa device kit won&#8217;t be available until next week. Some key features of the Home Energy package are listed on the slide above. However, this evening, Dave Pentecost and I got a sneak preview of both the Shaspa community and enterprise hardware and software packages from Shaspa founder Oliver Goh. We were pretty impressed.</p>
<p><strong>Dave:</strong> &#8220;<strong>It&#8217;s the ultimate hackable device for energy management!&#8221;</strong></p>
<p><strong>Oliver:</strong> <strong>&#8220;Bring us any sensor device &#8211; with documentation, and within three days we will put a driver into Shaspa.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost.jpg"><img class="alignnone size-medium wp-image-3392" title="daveandoliverpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost-300x178.jpg" alt="daveandoliverpost" width="300" height="178" /></a></p>
<p>Oliver is on the right and Dave on the left in the picture above. The picture below shows Shaspa in OpenSim. Oliver and I will be attending the <a href="http://www.3dtlc.com/"><span style="color: #810081;">3D Training, Learning and Collaboration</span></a> Conference in Washington, DC, next week.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23.png"><img class="alignnone size-medium wp-image-3412" title="picture-23" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23-300x208.png" alt="picture-23" width="300" height="208" /></a></p>
<h3>Links</h3>
<p>Here are some of the links that came up in the presentation as many people asked for them to be published. Dave also has them on <a href="http://www.gomaya.com/glyph/archives/002520.html#002520" target="_blank">his blog</a>.</p>
<p>SLIDES on GOOGLE DOCS:<br />
<a title="Earth Week SL Presentation, April 18th, 2009 - Google Docs" href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj">Earth Week SL Presentation, April 18th, 2009 &#8211; Google Docs</a></p>
<p><a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">Pachube, sensor networks</a></p>
<p><a href="http://www.gomaya.com/glyph" target="_blank">Dave&#8217;s blog covering Maya archaeology, jungle ecology, and technology</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/001914.html" target="_blank">Maya Frontier, Usumacinta River videos</a></p>
<p><a href="http://en.wikipedia.org/wiki/Collapse_(book)" target="_blank">Collapse</a></p>
<p><a href="http://arduino.cc/" target="_blank">Arduino (microcontrollers)</a></p>
<p><a href="http://community.pachube.com/tutorials" target="_blank">Pachube &#8211; tutorials</a></p>
<p><a href="http://apps.pachube.com/" target="_blank">Pachube Apps </a>-</p>
<p><a href="http://www.pachube.com/feeds/1284" target="_blank">Arduino-SL-Pachube data site</a></p>
<p><a href="http://www.pachube.com/feeds/1505" target="_blank">SL to Pachube site</a></p>
<p><a href="http://www.zachhoeken.com/connecting-to-the-world" target="_blank">Dave&#8217;s Danger Shield &#8211; Pachube  tutorial</a></p>
<p><a href="http://www.ladyada.net/make/tweetawatt/" target="_blank">TweetaWatt site (LadyAda)</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/002505.html" target="_blank">Dave&#8217;s post on TweetaWatt to Opensim/SL</a></p>
<p><a href="http://peterquirk.wordpress.com/2008/12/22/tutorial-using-the-streamlined-tool-chain-for-importing-sketchup-models-into-realxtend-04/" target="_blank">Peter Quirk&#8217;s post on Importing Sketchup into RealXtend</a></p>
<p><a href="http://opensimulator.org/wiki/Main_Page" target="_blank">Opensim</a></p>
<p><a href="http://www.realxtend.org/" target="_blank">RealXtend</a></p>
<p><a href="http://reactiongrid.com/" target="_blank">ReactionGrid</a></p>
<p><a href="http://homecamp.pbwiki.com/" target="_blank">homecamp</a></p>
<p><a href="http://www.cminion.com/wordpress/" target="_blank">cminion -wind turbines in OpenSim</a></p>
<p><a href="http://mikethebee.mevio.com/" target="_blank">MiketheBee</a></p>
<p><a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">Is it &#8220;OMG finally&#8221; for Augmented Reality?</a></p>
<p><a href="http://www.ugotrade.com/2008/12/15/smart-planetinterview-with-andy-stanford-clark/" target="_blank">Smart Planet: Interview with Andy Stanford-Clark</a></p>
<p><a href="http://www.orangecone.com/" target="_blank">Orange Cone &#8211; Information Shadows and Things as Services</a></p>
]]></content:encoded>
			<wfw:commentRss>https://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
	</channel>
</rss>
